/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.internal.dto.DetailedTableInfo;

import java.util.List;
import java.util.Map;

/**
 * Extracts the "# Detailed Table Information" section of the
 * {@code DESCRIBE FORMATTED} output into a {@link DetailedTableInfo} DTO.
 *
 * Sample of the section being parsed:
 * <pre>
 * | # Detailed Table Information  | NULL                          | NULL |
 * | Database:                     | default                       | NULL |
 * | Owner:                        | admin                         | NULL |
 * | CreateTime:                   | Mon Aug 01 13:28:42 UTC 2016  | NULL |
 * | LastAccessTime:               | UNKNOWN                       | NULL |
 * | Retention:                    | 0                             | NULL |
 * | Location:                     | hdfs://.../geolocation        | NULL |
 * | Table Type:                   | MANAGED_TABLE                 | NULL |
 * | Table Parameters:             | NULL                          | NULL |
 * |                               | numFiles                      | 1    |
 * |                               | numRows                       | 8001 |
 * </pre>
 */
public class DetailedTableInfoParser extends AbstractTableMetaParser<DetailedTableInfo> {

  public DetailedTableInfoParser() {
    // Section has no secondary header row; an empty line terminates it.
    super("# Detailed Table Information", null, "");
  }

  @Override
  public DetailedTableInfo parse(List<Row> rows) {
    Map<String, Object> section = parseSection(rows);

    DetailedTableInfo tableInfo = new DetailedTableInfo();
    tableInfo.setDbName(getString(section, "Database:"));
    tableInfo.setOwner(getString(section, "Owner:"));
    tableInfo.setCreateTime(getString(section, "CreateTime:"));
    tableInfo.setLastAccessTime(getString(section, "LastAccessTime:"));
    tableInfo.setRetention(getString(section, "Retention:"));
    tableInfo.setLocation(getString(section, "Location:"));
    tableInfo.setTableType(getString(section, "Table Type:"));

    // "Table Parameters:" is a nested key/value sub-table, hence getMap rather than getString.
    tableInfo.setParameters(getMap(section, "Table Parameters:"));

    return tableInfo;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.ambari.view.hive20.internal.parsers;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Helper routines shared by the table-meta section parsers.
 */
public class ParserUtils {

  /**
   * Matches a Hive column data type with optional precision and scale,
   * e.g. {@code string}, {@code varchar(255)} or {@code decimal(10,3)}.
   * Group 1 = type name, group 3 = precision, group 5 = scale.
   */
  public static final String DATA_TYPE_REGEX = "\\s*([^() ]+)\\s*(\\s*\\(\\s*([0-9]+)\\s*(\\s*,\\s*([0-9]+))?\\s*\\)\\s*)?\\s*";

  // Compiled once: the regex never changes and Pattern.compile is comparatively
  // expensive to run on every call.
  private static final Pattern DATA_TYPE_PATTERN = Pattern.compile(DATA_TYPE_REGEX);

  /**
   * Splits a Hive data type string into its type/precision/scale components.
   *
   * @param columnDataTypeString the string to parse as a datatype, e.g. {@code decimal(10,3)};
   *                             may be {@code null}
   * @return a list of exactly three elements — type, precision and scale, in that order;
   *         any component that is absent (or all three, if the input is {@code null} or
   *         unparseable) is {@code null}
   */
  public static List<String> parseColumnDataType(String columnDataTypeString) {
    List<String> typePrecisionScale = new ArrayList<>(3);

    // Guard against null input: the previous implementation threw a NullPointerException here.
    Matcher matcher = columnDataTypeString == null ? null : DATA_TYPE_PATTERN.matcher(columnDataTypeString);
    if (matcher != null && matcher.find()) {
      typePrecisionScale.add(matcher.group(1));
      typePrecisionScale.add(matcher.group(3));
      typePrecisionScale.add(matcher.group(5));
    } else {
      typePrecisionScale.add(null);
      typePrecisionScale.add(null);
      typePrecisionScale.add(null);
    }

    return typePrecisionScale;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

// NOTE: the previous revision imported org.apache.parquet.Strings for isNullOrEmpty —
// an accidental wrong-library import; Guava is the library used throughout this module.
import com.google.common.base.Strings;
import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.internal.dto.PartitionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Extracts the "# Partition Information" section of the {@code DESCRIBE FORMATTED}
 * output into a {@link PartitionInfo} DTO.
 *
 * General format of the section:
 * <pre>
 * | # Partition Information  | NULL      | NULL |
 * | # col_name               | data_type | comment |
 * |                          | NULL      | NULL |
 * | dt                       | string    | |
 * | country                  | string    | |
 * |                          | NULL      | NULL |
 * </pre>
 */
public class PartitionInfoParser extends AbstractTableMetaParser<PartitionInfo> {
  private static final Logger LOG = LoggerFactory.getLogger(PartitionInfoParser.class);

  public PartitionInfoParser() {
    super("# Partition Information", "# col_name", "", "");
  }

  /**
   * @param rows the full DESCRIBE FORMATTED output
   * @return the partition columns, or {@code null} when the table has no partition section
   */
  @Override
  public PartitionInfo parse(List<Row> rows) {
    List<ColumnInfo> columns = new ArrayList<>();

    Map<String, Object> parsedSection = parseSection(rows);
    for (Object value : parsedSection.values()) {
      if (!(value instanceof Entry)) {
        continue;
      }
      Entry entry = (Entry) value;

      // data_type may carry precision/scale, e.g. "decimal(10,3)"
      List<String> typePrecisionScale = ParserUtils.parseColumnDataType(entry.getValue());
      String datatype = typePrecisionScale.get(0);
      Integer precision = toInteger(typePrecisionScale.get(1));
      Integer scale = toInteger(typePrecisionScale.get(2));

      ColumnInfo columnInfo = new ColumnInfo(entry.getName(), datatype, precision, scale, entry.getComment());
      columns.add(columnInfo);
      LOG.debug("found partition column definition : {}", columnInfo);
    }

    // Non-partitioned tables yield no columns; signal that with null so callers can omit the section.
    return columns.isEmpty() ? null : new PartitionInfo(columns);
  }

  /** Parses a trimmed numeric string into an Integer; returns {@code null} for null/empty input. */
  private static Integer toInteger(String value) {
    return Strings.isNullOrEmpty(value) ? null : Integer.valueOf(value.trim());
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.internal.dto.ColumnOrder;
import org.apache.ambari.view.hive20.internal.dto.Order;
import org.apache.ambari.view.hive20.internal.dto.StorageInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parses the "# Storage Information" section of the {@code DESCRIBE FORMATTED}
 * output into a {@link StorageInfo} DTO.
 */
public class StorageInfoParser extends AbstractTableMetaParser<StorageInfo> {
  private static final Logger LOG = LoggerFactory.getLogger(StorageInfoParser.class);

  // Compiled once (was recompiled on every parse call). Matches entries such as
  // "Order(col:dt, order:1)" emitted by Hive for sort columns.
  private static final Pattern SORT_COLS_PATTERN =
      Pattern.compile("Order\\s*\\(\\s*col\\s*:\\s*([^,]+)\\s*,\\s*order\\s*:\\s*(\\d)\\s*\\)");

  public StorageInfoParser() {
    super("# Storage Information", null, "");
  }

  @Override
  public StorageInfo parse(List<Row> rows) {
    StorageInfo info = new StorageInfo();
    Map<String, Object> parsedSection = parseSection(rows);

    info.setSerdeLibrary(getString(parsedSection, "SerDe Library:"));
    info.setInputFormat(getString(parsedSection, "InputFormat:"));
    info.setOutputFormat(getString(parsedSection, "OutputFormat:"));
    info.setCompressed(getString(parsedSection, "Compressed:"));
    info.setNumBuckets(getString(parsedSection, "Num Buckets:"));
    info.setBucketCols(parseBucketColumns(getString(parsedSection, "Bucket Columns:")));
    info.setSortCols(parseSortCols(getString(parsedSection, "Sort Columns:")));
    info.setParameters(getMap(parsedSection, "Storage Desc Params:"));

    return info;
  }

  /**
   * Splits a bucket-column value such as {@code [col1, col2]} into trimmed names.
   *
   * @param value raw section value; {@code null} when the entry is absent —
   *              previously this caused a NullPointerException
   * @return the column names, never {@code null}
   */
  private List<String> parseBucketColumns(String value) {
    List<String> columns = new ArrayList<>();
    if (value == null) {
      return columns;
    }
    for (String part : value.split("[\\[\\],]")) {
      String trimmed = part.trim();
      if (!trimmed.isEmpty()) {
        columns.add(trimmed);
      }
    }
    return columns;
  }

  /**
   * Extracts {@link ColumnOrder} entries from the "Sort Columns:" value.
   *
   * @param str raw section value; {@code null} when the entry is absent —
   *            previously this caused a NullPointerException
   * @return the sort columns in declaration order, never {@code null}
   */
  private List<ColumnOrder> parseSortCols(String str) {
    LinkedList<ColumnOrder> list = new LinkedList<>();
    if (str == null) {
      return list;
    }

    Matcher matcher = SORT_COLS_PATTERN.matcher(str);
    while (matcher.find()) {
      String colName = matcher.group(1);
      // Hive encodes the direction as an ordinal (0/1); map it back to the Order enum.
      Order order = Order.fromOrdinal(Integer.valueOf(matcher.group(2)));
      ColumnOrder co = new ColumnOrder(colName, order);
      list.add(co);
      LOG.debug("columnOrder : {}", co);
    }

    return list;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;

import java.util.List;

/**
 * Builds a complete table-metadata object of type {@code T} from the raw rows
 * returned by Hive for a table.
 */
public interface TableMetaParser<T> {
  /**
   * @param database                  name of the database containing the table
   * @param table                     name of the table
   * @param createTableStatementRows  rows of the SHOW CREATE TABLE output
   * @param describeFormattedRows     rows of the DESCRIBE FORMATTED output
   * @return the assembled metadata object
   */
  T parse(String database, String table, List<Row> createTableStatementRows, List<Row> describeFormattedRows);
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.internal.dto.DetailedTableInfo;
import org.apache.ambari.view.hive20.internal.dto.PartitionInfo;
import org.apache.ambari.view.hive20.internal.dto.StorageInfo;
import org.apache.ambari.view.hive20.internal.dto.TableMeta;
import org.apache.ambari.view.hive20.internal.dto.ViewInfo;

import javax.inject.Inject;
import java.util.List;

/**
 * Delegates to the individual section parsers and assembles their results
 * into a single {@link TableMeta}.
 */
public class TableMetaParserImpl implements TableMetaParser<TableMeta> {

  @Inject
  private CreateTableStatementParser createTableStatementParser;

  @Inject
  private ColumnInfoParser columnInfoParser;

  @Inject
  private PartitionInfoParser partitionInfoParser;

  @Inject
  private DetailedTableInfoParser detailedTableInfoParser;

  @Inject
  private StorageInfoParser storageInfoParser;

  @Inject
  private ViewInfoParser viewInfoParser;

  @Override
  public TableMeta parse(String database, String table, List<Row> createTableStatementRows, List<Row> describeFormattedRows) {
    // Run every section parser over the raw output first...
    String ddl = createTableStatementParser.parse(createTableStatementRows);
    DetailedTableInfo detailedInfo = detailedTableInfoParser.parse(describeFormattedRows);
    StorageInfo storageInfo = storageInfoParser.parse(describeFormattedRows);
    List<ColumnInfo> columnInfos = columnInfoParser.parse(describeFormattedRows);
    PartitionInfo partitionInfo = partitionInfoParser.parse(describeFormattedRows);
    ViewInfo viewInfo = viewInfoParser.parse(describeFormattedRows);

    // ...then assemble the aggregate DTO.
    TableMeta tableMeta = new TableMeta();
    tableMeta.setId(database + "/" + table);
    tableMeta.setDatabase(database);
    tableMeta.setTable(table);
    tableMeta.setColumns(columnInfos);
    tableMeta.setDdl(ddl);
    tableMeta.setPartitionInfo(partitionInfo);
    tableMeta.setDetailedInfo(detailedInfo);
    tableMeta.setStorageInfo(storageInfo);
    tableMeta.setViewInfo(viewInfo);
    return tableMeta;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;

import java.util.List;

/**
 * Parses one named section of a table description into an object of type {@code T}.
 */
public interface TableMetaSectionParser<T> {
  /**
   * @param rows the raw rows containing the section to parse
   * @return the parsed section object
   */
  T parse(List<Row> rows);
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.internal.parsers;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.internal.dto.ViewInfo;

import java.util.List;
import java.util.Map;

/**
 * Extracts the "# View Information" section of the {@code DESCRIBE FORMATTED}
 * output into a {@link ViewInfo} DTO.
 */
public class ViewInfoParser extends AbstractTableMetaParser<ViewInfo> {

  public ViewInfoParser() {
    super("# View Information", null, "");
  }

  /**
   * @param rows the full DESCRIBE FORMATTED output
   * @return the view texts, or {@code null} when the object is not a view
   *         (the section is absent for regular tables)
   */
  @Override
  public ViewInfo parse(List<Row> rows) {
    Map<String, Object> section = parseSection(rows);
    if (section.isEmpty()) {
      return null;
    }

    ViewInfo viewInfo = new ViewInfo();
    viewInfo.setOriginalText(getString(section, "View Original Text:"));
    viewInfo.setExtendedText(getString(section, "View Expanded Text:"));
    return viewInfo;
  }
}
+* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.ambari.view.hive20.internal.query.generators; + +import com.google.common.base.Function; +import com.google.common.base.Joiner; +import com.google.common.base.Optional; +import com.google.common.base.Predicate; +import com.google.common.base.Strings; +import com.google.common.collect.FluentIterable; +import org.apache.ambari.view.hive20.internal.dto.ColumnInfo; +import org.apache.ambari.view.hive20.internal.dto.ColumnOrder; +import org.apache.ambari.view.hive20.internal.dto.TableMeta; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import static org.apache.ambari.view.hive20.internal.query.generators.QueryGenerationUtils.isNullOrEmpty; + +public class AlterTableQueryGenerator implements QueryGenerator { + private static final Logger LOG = LoggerFactory.getLogger(AlterTableQueryGenerator.class); + + private final TableMeta oldMeta; + private final TableMeta newMeta; + + public AlterTableQueryGenerator(TableMeta oldMeta, TableMeta newMeta) { + this.oldMeta = oldMeta; + this.newMeta = newMeta; + } + + public TableMeta getOldMeta() { + return oldMeta; + } + + public TableMeta getNewMeta() { + return newMeta; + } + + public String getQueryPerfix() { + return new StringBuffer(" ALTER TABLE ") + .append("`").append(this.getOldMeta().getDatabase()).append(".").append(this.getOldMeta().getTable().trim()).append("` ").toString(); + } + + public Optional<String> getQuery() { + List<Optional<String>> queries = new LinkedList<>(); + + // TODO: rename of table name has to be handled separately as other queries depend on new name. 
+// Optional<String> tableRenameQuery = this.generateTableRenameQuery(this.getOldMeta().getDatabase(), +// this.getOldMeta().getTable(), this.getNewMeta().getDatabase(), this.getNewMeta().getTable()); +// queries.add(tableRenameQuery); + + Optional<List<Optional<String>>> columnQuery = this.generateColumnQuery(); + if (columnQuery.isPresent()) { + queries.addAll(columnQuery.get()); + } + + if (null != this.getNewMeta().getDetailedInfo() && null != this.getNewMeta().getDetailedInfo()) { + Optional<String> tablePropertiesQuery = this.generateTablePropertiesQuery(this.getOldMeta().getDetailedInfo().getParameters(), + this.getNewMeta().getDetailedInfo().getParameters()); + queries.add(tablePropertiesQuery); + } + + if (null != this.getOldMeta().getStorageInfo() && null != this.getNewMeta().getStorageInfo()) { + String oldSerde = this.getOldMeta().getStorageInfo().getSerdeLibrary(); + String newSerde = this.getNewMeta().getStorageInfo().getSerdeLibrary(); + Map<String, String> oldParameters = this.getOldMeta().getStorageInfo().getParameters(); + Map<String, String> newParameters = this.getNewMeta().getStorageInfo().getParameters(); + + Optional<String> serdeProperties = this.generateSerdeQuery(oldSerde, oldParameters, newSerde, newParameters); + queries.add(serdeProperties); + } + + if (null != this.getOldMeta().getStorageInfo() && null != this.getNewMeta().getStorageInfo()) { + List<String> oldBucketCols = this.getOldMeta().getStorageInfo().getBucketCols(); + List<ColumnOrder> oldSortCols = this.getOldMeta().getStorageInfo().getSortCols(); + String oldNumBuckets = this.getOldMeta().getStorageInfo().getNumBuckets(); + + List<String> newBucketCols = this.getNewMeta().getStorageInfo().getBucketCols(); + List<ColumnOrder> newSortCols = this.getNewMeta().getStorageInfo().getSortCols(); + String newNumBuckets = this.getNewMeta().getStorageInfo().getNumBuckets(); + + Optional<String> storagePropertyQuery = this.generateStoragePropertyQuery(oldBucketCols, oldSortCols, 
oldNumBuckets, newBucketCols, newSortCols, newNumBuckets); + queries.add(storagePropertyQuery); + } + + + List<String> queryList = FluentIterable.from(queries).filter(new Predicate<Optional<String>>() { + @Override + public boolean apply(Optional<String> input) { + return input.isPresent(); + } + }).transform(new Function<Optional<String>, String>() { + @Override + public String apply(Optional<String> input) { + return input.get(); + } + }).toList(); + + if (!queryList.isEmpty()) { + return Optional.of(Joiner.on(";\n").join(queryList)); + } else { + return Optional.absent(); + } + + } + + Optional<List<Optional<String>>> generateColumnQuery() { + List<ColumnInfo> oldColumns = this.getOldMeta().getColumns(); + List<ColumnInfo> newColumns = this.getNewMeta().getColumns(); + boolean cascade = null != this.getNewMeta().getPartitionInfo() && !isNullOrEmpty(this.getNewMeta().getPartitionInfo().getColumns()); + Optional<List<String>> queries = createColumnQueries(oldColumns, newColumns, cascade); + if (queries.isPresent()) { + List<Optional<String>> queryList = FluentIterable.from(queries.get()).transform(new Function<String, Optional<String>>() { + @Override + public Optional<String> apply(String input) { + return Optional.of(getQueryPerfix() + input); + } + }).toList(); + return Optional.of(queryList); + } else { + return Optional.absent(); + } + } + + /** + * TODO : this uses CASCADE. confirm that it is expected. + * ALTER TABLE table_name [PARTITION partition_spec] CHANGE [COLUMN] col_old_name col_new_name column_type + * [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]; + * <p> + * ALTER TABLE table_name + * [PARTITION partition_spec] -- (Note: Hive 0.14.0 and later) + * ADD|REPLACE COLUMNS (col_name data_type [COMMENT col_comment], ...) 
+ * [CASCADE|RESTRICT] -- (Note: Hive 0.15.0 and later) + * + * @param oldColumns + * @param newColumns + * @return + */ + static Optional<List<String>> createColumnQueries(List<ColumnInfo> oldColumns, List<ColumnInfo> newColumns, boolean cascade) { + if (isNullOrEmpty(oldColumns) || isNullOrEmpty(newColumns)) { + LOG.error("oldColumns = {} or newColumns = {} was null.", oldColumns, newColumns); + throw new IllegalArgumentException("Old or new columns cannot be empty."); + } + + //TODO : removing columns not allowed right now. handle this later using REPLACE for native serde or error. + if (oldColumns.size() > newColumns.size()) { + LOG.error("removing columns from hive table is not supported yet."); + throw new IllegalArgumentException("removing columns is not allowed."); + } + + List<String> queries = new LinkedList<>(); + int i = 0; + boolean foundChange = false; + for (; i < oldColumns.size(); i++) { + ColumnInfo oldColumn = oldColumns.get(i); + ColumnInfo newColumn = newColumns.get(i); + + if (!oldColumn.equals(newColumn)) { + foundChange = true; + StringBuilder queryBuilder = new StringBuilder(); + queryBuilder.append(" CHANGE COLUMN `").append(oldColumn.getName()).append("` ") + .append(QueryGenerationUtils.getColumnRepresentation(newColumn)); + + if(cascade){ + queryBuilder.append(" CASCADE"); + } + + queries.add(queryBuilder.toString()); + } + } + + if (i < newColumns.size()) { + StringBuilder queryBuilder = new StringBuilder(); + queryBuilder.append(" ADD COLUMNS ( "); + boolean first = true; + for (; i < newColumns.size(); i++) { + foundChange = true; + ColumnInfo columnInfo = newColumns.get(i); + if (!first) { + queryBuilder.append(", "); + } else { + first = false; + } + + queryBuilder.append(QueryGenerationUtils.getColumnRepresentation(columnInfo)); + } + queryBuilder.append(" )"); + + if(cascade){ + queryBuilder.append(" CASCADE"); + } + + queries.add(queryBuilder.toString()); + } + + if (foundChange) { + return Optional.of(queries); + } else { + 
return Optional.absent(); + } + } + + Optional<String> generateStoragePropertyQuery(List<String> oldBucketCols, List<ColumnOrder> oldSortCols, String oldNumBuckets, List<String> newBucketCols, List<ColumnOrder> newSortCols, String newNumBuckets) { + Optional<String> query = createStoragePropertyQuery(oldBucketCols, oldSortCols, oldNumBuckets, newBucketCols, newSortCols, newNumBuckets); + if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get()); + else return Optional.absent(); + } + + /** + * ALTER TABLE table_name CLUSTERED BY (col_name, col_name, ...) [SORTED BY (col_name, ...)] + * INTO num_buckets BUCKETS; + * + * @param oldBucketCols + * @param oldSortCols + * @param oldNumBuckets + * @param newBucketCols + * @param newSortCols + * @param newNumBuckets + * @return + */ + static Optional<String> createStoragePropertyQuery(List<String> oldBucketCols, List<ColumnOrder> oldSortCols, String oldNumBuckets, List<String> newBucketCols, List<ColumnOrder> newSortCols, String newNumBuckets) { + StringBuilder queryBuilder = new StringBuilder(); + boolean foundDiff = false; + + if (isNullOrEmpty(newBucketCols)) { + if (!isNullOrEmpty(oldBucketCols)) { + // TODO : all cols removed. how to handle this. Ignoring + LOG.error("cannot handle removal of all the columns from buckets."); + throw new IllegalArgumentException("removing all columns from CLUSTERED BY not allowed."); + } else { + // NOTHING ADDED to CLUSTERED BY. 
+ return Optional.absent(); + } + } else { + queryBuilder.append(" CLUSTERED BY ( ").append(Joiner.on(",").join(newBucketCols)).append(" ) "); + } + + if (!isNullOrEmpty(newSortCols)) { + queryBuilder.append(" SORTED BY ( ") + .append(Joiner.on(",").join(FluentIterable.from(newSortCols).transform(new Function<ColumnOrder, String>() { + @Nullable + @Override + public String apply(@Nullable ColumnOrder input) { + return input.getColumnName() + " " + input.getOrder().name(); + } + }))) + .append(" ) "); + } + + if (Strings.isNullOrEmpty(newNumBuckets)) { + LOG.error("Number of buckets cannot be empty if CLUSTERED BY is mentioned."); + throw new IllegalArgumentException("Number of buckets cannot be empty."); + } else { + queryBuilder.append(" INTO ").append(newNumBuckets).append(" BUCKETS "); + } + + return Optional.of(queryBuilder.toString()); + } + + Optional<String> generateSerdeQuery(String oldSerde, Map<String, String> oldParameters, String newSerde, Map<String, String> newParameters) { + Optional<String> query = createSerdeQuery(oldSerde, oldParameters, newSerde, newParameters); + if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get()); + else return Optional.absent(); + } + + /** + * assuming that getStorageInfo().getParameters() gives only serde properties + * + * @return + */ + static Optional<String> createSerdeQuery(String oldSerde, Map<String, String> oldParameters, String newSerde, Map<String, String> newParameters) { + String query = ""; + boolean serdeChanged = false; + if (null != newSerde) { + serdeChanged = !newSerde.equals(oldSerde); + query += " SET SERDE " + newSerde + " "; + } + Optional<Map<String, Map<Object, Object>>> diff = QueryGenerationUtils.findDiff(oldParameters, newParameters); + if (diff.isPresent()) { + Map<String, Map<Object, Object>> diffMap = diff.get(); + Map<Object, Object> added = diffMap.get(QueryGenerationUtils.ADDED); + Map<Object, Object> modified = diffMap.get(QueryGenerationUtils.MODIFIED); + Map<Object, 
Object> deleted = diffMap.get(QueryGenerationUtils.DELETED); + + // TODO : how to handle deleted? actually I cannot find anything in hive alter table that will remove existing property + Map addedOrModified = new HashMap<>(added); + addedOrModified.putAll(modified); + + if (serdeChanged) { + query += " WITH SERDEPROPERTIES "; + } else { + query += " SET SERDEPROPERTIES "; + } + query += " ( " + QueryGenerationUtils.getPropertiesAsKeyValues(addedOrModified) + " ) "; + } + + if (!query.trim().isEmpty()) { + return Optional.of(query); + } + + return Optional.absent(); + } + + Optional<String> generateTablePropertiesQuery(Map oldProps, Map newProps) { + Optional<String> query = createTablePropertiesQuery(oldProps, newProps); + if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get()); + else return Optional.absent(); + } + + + static Optional<String> createTablePropertiesQuery(Map oldProps, Map newProps) { + if (null == newProps) { + newProps = new HashMap(); + } +// TODO ignore system generated table properties during comparison + if (!QueryGenerationUtils.isEqual(oldProps, newProps)) { + return Optional.of(" SET TBLPROPERTIES (" + QueryGenerationUtils.getPropertiesAsKeyValues(newProps) + ")"); + } + + return Optional.absent(); + } + + Optional<String> generateTableRenameQuery(String oldDatabaseName, String oldTableName, String newDatabaseName, String newTableName) { + Optional<String> query = createTableRenameQuery(oldDatabaseName, oldTableName, newDatabaseName, newTableName); + if (query.isPresent()) return Optional.of(getQueryPerfix() + query.get()); + else return Optional.absent(); + } + + static Optional<String> createTableRenameQuery(String oldDatabaseName, String oldTableName, String newDatabaseName, String newTableName) { + if (Strings.isNullOrEmpty(oldTableName) || Strings.isNullOrEmpty(newTableName)) { + LOG.error("oldTableName or newTableName is empty : {}, {} ", oldTableName, newTableName); + throw new IllegalArgumentException("oldTableName 
and newTableName both should be non empty."); + } + + String oldName = (null != oldDatabaseName ? oldDatabaseName.trim() + "." : "") + oldTableName.trim(); + String newName = (null != newDatabaseName ? newDatabaseName.trim() + "." : "") + newTableName.trim(); + + if (!oldName.equals(newName)) { + return Optional.of(" RENAME TO " + newName); + } + + return Optional.absent(); + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java new file mode 100644 index 0000000..eab3a4b --- /dev/null +++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/CreateTableQueryGenerator.java @@ -0,0 +1,165 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
package org.apache.ambari.view.hive20.internal.query.generators;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
import org.apache.ambari.view.hive20.internal.dto.ColumnOrder;
import org.apache.ambari.view.hive20.internal.dto.TableMeta;

import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;

/**
 * Generates a HiveQL CREATE TABLE statement from a {@link TableMeta}.
 */
public class CreateTableQueryGenerator implements QueryGenerator {
  private static final String COMMENT = "COMMENT";
  public static final String ESCAPE_DELIM = "escape.delim";
  public static final String FIELD_DELIM = "field.delim";
  // NOTE: "colelction.delim" is Hive's actual (historically misspelled)
  // serde property key -- do not "fix" the string value.
  public static final String COLELCTION_DELIM = "colelction.delim";
  public static final String MAPKEY_DELIM = "mapkey.delim";
  public static final String LINE_DELIM = "line.delim";
  public static final String SERIALIZATION_NULL_FORMAT = "serialization.null.format";

  private TableMeta tableMeta;

  public CreateTableQueryGenerator(TableMeta tableMeta) {
    this.tableMeta = tableMeta;
  }

  /**
   * @return the CREATE TABLE statement for the configured table meta
   */
  @Override
  public Optional<String> getQuery() {
    StringBuilder query = new StringBuilder();
    query.append("CREATE TABLE ");
    query.append(tableMeta.getDatabase()).append(".");
    query.append(tableMeta.getTable()).append(" ");
    query.append("(").append(getColumnQuery(tableMeta.getColumns())).append(") ");

    if (null != tableMeta.getDetailedInfo() && null != tableMeta.getDetailedInfo().getParameters()) {
      String tableComment = tableMeta.getDetailedInfo().getParameters().get(COMMENT);
      if (!Strings.isNullOrEmpty(tableComment)) {
        // BUG FIX: Hive requires the table comment as a quoted string
        // literal; it was previously emitted unquoted.
        query.append(" COMMENT '").append(tableComment).append("'");
      }
    }

    if (null != tableMeta.getPartitionInfo()) {
      if (tableMeta.getPartitionInfo().getColumns() != null && !tableMeta.getPartitionInfo().getColumns().isEmpty()) {
        query.append(" PARTITIONED BY ( ").append(getColumnQuery(tableMeta.getPartitionInfo().getColumns())).append(")");
      }
    }

    if (null != tableMeta.getStorageInfo()) {
      if (!QueryGenerationUtils.isNullOrEmpty(tableMeta.getStorageInfo().getBucketCols())) {
        query.append(" CLUSTERED BY (").append(Joiner.on(",").join(tableMeta.getStorageInfo().getBucketCols())).append(")");
      }
      if (!QueryGenerationUtils.isNullOrEmpty(tableMeta.getStorageInfo().getSortCols())) {
        query.append(" SORTED BY (").append(getSortColQuery(tableMeta.getStorageInfo().getSortCols())).append(")");
      }
      if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getNumBuckets())) {
        query.append(" INTO ").append(tableMeta.getStorageInfo().getNumBuckets()).append(" BUCKETS ");
      }
      // TODO : Skewed information not available right now.

      appendRowFormat(query, tableMeta.getStorageInfo().getParameters());
      appendStoredAs(query);
    }

    if (null != tableMeta.getDetailedInfo()) {
      if (!Strings.isNullOrEmpty(tableMeta.getDetailedInfo().getLocation())) {
        query.append(" LOCATION '").append(tableMeta.getDetailedInfo().getLocation()).append("'");
      }

      // BUG FIX: the original called QueryGenerationUtils.isNullOrEmpty
      // WITHOUT negation, so TBLPROPERTIES was only meant to be emitted for a
      // non-empty map (it "worked" solely because that utility was inverted).
      // Use this class's correct private helper, negated.
      if (!isNullOrEmpty(tableMeta.getDetailedInfo().getParameters())) {
        String props = QueryGenerationUtils.getPropertiesAsKeyValues(tableMeta.getDetailedInfo().getParameters());
        query.append(" TBLPROPERTIES (").append(props).append(")");
      }
    }

    return Optional.of(query.toString());
  }

  /** Appends the ROW FORMAT DELIMITED clause when any delimiter property is set. */
  private void appendRowFormat(StringBuilder query, Map<String, String> params) {
    if (isNullOrEmpty(params)) {
      return;
    }
    if (!Strings.isNullOrEmpty(params.get(ESCAPE_DELIM)) ||
        !Strings.isNullOrEmpty(params.get(FIELD_DELIM)) ||
        !Strings.isNullOrEmpty(params.get(COLELCTION_DELIM)) ||
        !Strings.isNullOrEmpty(params.get(MAPKEY_DELIM)) ||
        !Strings.isNullOrEmpty(params.get(LINE_DELIM)) ||
        !Strings.isNullOrEmpty(params.get(SERIALIZATION_NULL_FORMAT))) {
      query.append(" ROW FORMAT DELIMITED ");
      if (!Strings.isNullOrEmpty(params.get(FIELD_DELIM))) {
        query.append(" FIELDS TERMINATED BY '").append(params.get(FIELD_DELIM)).append("'");
      }
      if (!Strings.isNullOrEmpty(params.get(ESCAPE_DELIM))) {
        query.append(" ESCAPED BY '").append(params.get(ESCAPE_DELIM)).append("'");
      }
      if (!Strings.isNullOrEmpty(params.get(COLELCTION_DELIM))) {
        query.append(" COLLECTION ITEMS TERMINATED BY '").append(params.get(COLELCTION_DELIM)).append("'");
      }
      if (!Strings.isNullOrEmpty(params.get(MAPKEY_DELIM))) {
        query.append(" MAP KEYS TERMINATED BY '").append(params.get(MAPKEY_DELIM)).append("'");
      }
      if (!Strings.isNullOrEmpty(params.get(LINE_DELIM))) {
        query.append(" LINES TERMINATED BY '").append(params.get(LINE_DELIM)).append("'");
      }
      if (!Strings.isNullOrEmpty(params.get(SERIALIZATION_NULL_FORMAT))) {
        query.append(" NULL DEFINED AS '").append(params.get(SERIALIZATION_NULL_FORMAT)).append("'");
      }
    }
  }

  /** Appends STORED AS file_format, or the INPUTFORMAT/OUTPUTFORMAT pair. */
  private void appendStoredAs(StringBuilder query) {
    if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getFileFormat()) && !tableMeta.getStorageInfo().getFileFormat().trim().isEmpty()) {
      query.append(" STORED AS ").append(tableMeta.getStorageInfo().getFileFormat().trim());
    } else if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getInputFormat()) ||
        !Strings.isNullOrEmpty(tableMeta.getStorageInfo().getOutputFormat())) {
      query.append(" STORED AS ");
      if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getInputFormat())) {
        query.append(" INPUTFORMAT '").append(tableMeta.getStorageInfo().getInputFormat()).append("'");
      }
      if (!Strings.isNullOrEmpty(tableMeta.getStorageInfo().getOutputFormat())) {
        query.append(" OUTPUTFORMAT '").append(tableMeta.getStorageInfo().getOutputFormat()).append("'");
      }
    }
  }

  /** @return true when the map is null or has no entries (correct semantics). */
  private boolean isNullOrEmpty(Map map) {
    return null == map || map.isEmpty();
  }

  /** @return "col order" pairs joined by commas, for SORTED BY. */
  private String getSortColQuery(List<ColumnOrder> sortCols) {
    List<String> sortColsList = FluentIterable.from(sortCols).transform(new Function<ColumnOrder, String>() {
      @Nullable
      @Override
      public String apply(@Nullable ColumnOrder input) {
        return input.getColumnName() + " " + input.getOrder().name();
      }
    }).toList();
    return Joiner.on(",").join(sortColsList);
  }

  /** @return backtick-quoted column definitions joined by commas. */
  private String getColumnQuery(List<ColumnInfo> columns) {
    List<String> columnQuery = FluentIterable.from(columns).transform(new Function<ColumnInfo, String>() {
      @Nullable
      @Override
      public String apply(@Nullable ColumnInfo column) {
        return QueryGenerationUtils.getColumnRepresentation(column);
      }
    }).toList();

    return Joiner.on(",").join(columnQuery);
  }
}
The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.ambari.view.hive20.internal.query.generators; + +import com.google.common.base.Optional; +import org.apache.ambari.view.hive20.exceptions.ServiceException; +import org.apache.parquet.Strings; + +public class DeleteTableQueryGenerator implements QueryGenerator{ + private final String databaseName; + private final String tableName; + private Boolean purge = Boolean.FALSE; + + public DeleteTableQueryGenerator(String databaseName, String tableName) { + this(databaseName, tableName, Boolean.FALSE); + } + + public DeleteTableQueryGenerator(String databaseName, String tableName, Boolean purge) { + this.databaseName = databaseName; + this.tableName = tableName; + if( null != purge ) this.purge = purge; + } + + public String getDatabaseName() { + return databaseName; + } + + public String getTableName() { + return tableName; + } + + public Boolean getPurge() { + return purge; + } + + public void setPurge(Boolean purge) { + this.purge = purge; + } + + /** + * @return + * @throws ServiceException + */ + @Override + public Optional<String> getQuery() throws ServiceException { + if(Strings.isNullOrEmpty(this.getDatabaseName()) || Strings.isNullOrEmpty(this.getTableName())) + throw new ServiceException("databaseName or tableName was null."); + + return Optional.of("DROP TABLE `" + databaseName + "`.`" + tableName + "`" + (this.getPurge() ? 
" PURGE " : "")); + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java new file mode 100644 index 0000000..d9dc6e1 --- /dev/null +++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/QueryGenerationUtils.java @@ -0,0 +1,151 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
package org.apache.ambari.view.hive20.internal.query.generators;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;

import javax.annotation.Nullable;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Static helpers shared by the HiveQL query generators: emptiness and
 * equality checks, property-map diffing, and HiveQL fragment rendering.
 */
public class QueryGenerationUtils {

  public static final String ADDED = "ADDED";
  public static final String DELETED = "DELETED";
  public static final String MODIFIED = "MODIFIED";

  /**
   * @return true when the map is null or has no entries.
   */
  public static boolean isNullOrEmpty(Map map) {
    // BUG FIX: this previously returned (null != map && !map.isEmpty()) --
    // the exact opposite of the method's name and of the Collection overload
    // below. Callers that relied on the inverted result must be negated.
    return null == map || map.isEmpty();
  }

  /**
   * @return true when the collection is null or has no elements.
   */
  public static boolean isNullOrEmpty(Collection collection) {
    return null == collection || collection.isEmpty();
  }

  /**
   * Entry-wise equality of two property maps, treating null as an empty map.
   */
  public static boolean isEqual(Map oldProps, Map newProps) {
    // BUG FIX: previously a null map compared against a non-null, non-empty
    // map fell through both branches and returned true.
    if (oldProps == null) {
      return newProps == null || newProps.isEmpty();
    }
    if (newProps == null) {
      return oldProps.isEmpty();
    }
    // Map.equals is the same size + per-key value comparison the original
    // loop performed by hand.
    return oldProps.equals(newProps);
  }

  /**
   * Returns a map with 3 keys "DELETED", "ADDED" and "MODIFIED" describing the
   * difference between oldProps and newProps. For "ADDED" and "MODIFIED" the
   * values are taken from newProps; for "DELETED" from oldProps. A null input
   * is treated as an empty map.
   *
   * @param oldProps previous properties (may be null)
   * @param newProps desired properties (may be null)
   * @return the diff; always present, always containing all three keys
   */
  public static Optional<Map<String, Map<Object, Object>>> findDiff(Map oldProps, Map newProps) {
    Map<String, Map<Object, Object>> ret = new HashMap<>();
    Map<Object, Object> added = new HashMap<>();
    Map<Object, Object> modified = new HashMap<>();
    Map<Object, Object> deleted = new HashMap<>();

    // BUG FIX: null inputs are normalized to empty maps, and the three result
    // keys are ALWAYS present. Previously findDiff(null, null) returned an
    // empty outer map, which made diffMap.get(ADDED) null and NPE'd callers.
    Map oldMap = (null == oldProps) ? new HashMap() : oldProps;
    Map newMap = (null == newProps) ? new HashMap() : newProps;

    Set<Map.Entry> entrySet = oldMap.entrySet();
    for (Map.Entry e : entrySet) {
      Object key = e.getKey();
      Object newValue = newMap.get(key);
      if (e.getValue() == null) {
        // old value was null: a non-null new value counts as an addition
        if (newValue != null) {
          added.put(key, newValue);
        }
      } else {
        if (newValue == null) {
          // BUG FIX: record the old value; the original stored newValue,
          // which is always null here.
          deleted.put(key, e.getValue());
        } else if (!e.getValue().equals(newValue)) {
          modified.put(key, newValue);
        }
      }
    }

    // Keys that only exist (with a value) in the new map are additions.
    Set<Map.Entry> newEntrySet = newMap.entrySet();
    for (Map.Entry e : newEntrySet) {
      if (e.getValue() != null && oldMap.get(e.getKey()) == null) {
        added.put(e.getKey(), e.getValue());
      }
    }

    ret.put(ADDED, added);
    ret.put(DELETED, deleted);
    ret.put(MODIFIED, modified);

    return Optional.of(ret);
  }

  /**
   * @return properties rendered as 'key'='value' pairs joined by commas,
   *         suitable for TBLPROPERTIES / SERDEPROPERTIES.
   */
  public static String getPropertiesAsKeyValues(Map<String, String> parameters) {
    // (redundant cast to List<String> removed; toList() is already typed)
    List<String> props = FluentIterable.from(parameters.entrySet())
        .transform(new Function<Map.Entry<String, String>, String>() {
          @Nullable
          @Override
          public String apply(@Nullable Map.Entry<String, String> entry) {
            return "'" + entry.getKey() + "'='" + entry.getValue() + "'";
          }
        }).toList();

    return Joiner.on(",").join(props);
  }

  /**
   * @return the column's HiveQL definition: `name` type[(precision[,scale])]
   *         [COMMENT '...']
   */
  public static String getColumnRepresentation(ColumnInfo column) {
    StringBuilder colQuery = new StringBuilder().append("`").append(column.getName()).append("`");
    colQuery.append(" ").append(column.getType());
    if (!QueryGenerationUtils.isNullOrZero(column.getPrecision())) {
      if (!QueryGenerationUtils.isNullOrZero(column.getScale())) {
        colQuery.append("(").append(column.getPrecision()).append(",").append(column.getScale()).append(")");
      } else {
        colQuery.append("(").append(column.getPrecision()).append(")");
      }
    }
    if (!Strings.isNullOrEmpty(column.getComment())) {
      colQuery.append(" COMMENT '").append(column.getComment()).append("'");
    }

    return colQuery.toString();
  }

  /** @return true when the boxed integer is null or zero. */
  public static boolean isNullOrZero(Integer integer) {
    return null == integer || 0 == integer;
  }
}
package org.apache.ambari.view.hive20.internal.query.generators;

import com.google.common.base.Optional;
import org.apache.ambari.view.hive20.exceptions.ServiceException;

/**
 * A generator that produces a single HiveQL statement (or joined statement
 * list) for one DDL operation.
 */
public interface QueryGenerator {
  /**
   * @return the generated query, or absent when there is nothing to do
   * @throws ServiceException when required inputs are missing or invalid
   */
  Optional<String> getQuery() throws ServiceException;
}

// ---------------------------------------------------------------------------

package org.apache.ambari.view.hive20.persistence;

import org.apache.ambari.view.PersistenceException;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
import org.apache.ambari.view.hive20.persistence.utils.Indexed;
import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
import org.apache.commons.beanutils.BeanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.beans.Transient;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.List;

/**
 * Engine for storing objects to context DataStore storage
 */
public class DataStoreStorage implements Storage {
  private final static Logger LOG =
      LoggerFactory.getLogger(DataStoreStorage.class);

  protected ViewContext context;

  /**
   * Constructor
   * @param context View Context instance
   */
  public DataStoreStorage(ViewContext context) {
    this.context = context;
  }

  /**
   * Stores a clone of the bean (with @Transient fields nulled out) and copies
   * the store-assigned id back onto the caller's object.
   */
  @Override
  public synchronized void store(Class model, Indexed obj) {
    try {
      // Clone so that clearing transient fields does not mutate the caller's bean.
      Indexed newBean = (Indexed) BeanUtils.cloneBean(obj);
      preprocessEntity(newBean);
      context.getDataStore().store(newBean);
      obj.setId(newBean.getId());
    } catch (Exception e) {
      // cloneBean declares several checked exceptions; wrap them all uniformly.
      throw new ServiceFormattedException("S020 Data storage error", e);
    }
  }

  private void preprocessEntity(Indexed obj) {
    cleanTransientFields(obj);
  }

  /** Nulls every property whose setter is annotated @Transient before persisting. */
  private void cleanTransientFields(Indexed obj) {
    for (Method m : obj.getClass().getMethods()) {
      Transient aTransient = m.getAnnotation(Transient.class);
      if (aTransient != null && m.getName().startsWith("set")) {
        try {
          m.invoke(obj, new Object[]{ null });
        } catch (IllegalAccessException | InvocationTargetException e) {
          // multi-catch instead of two identical catch blocks
          throw new ServiceFormattedException("S030 Data storage error", e);
        }
      }
    }
  }

  /**
   * @throws ItemNotFound when no entity with the given id exists
   */
  @Override
  public synchronized <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
    // Parameterized logging: no String.format cost when DEBUG is disabled.
    LOG.debug("Loading {} #{}", model.getName(), id);
    try {
      T obj = context.getDataStore().find(model, id);
      if (obj != null) {
        return obj;
      } else {
        throw new ItemNotFound();
      }
    } catch (PersistenceException e) {
      throw new ServiceFormattedException("S040 Data storage error", e);
    }
  }

  @Override
  public synchronized <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter) {
    LinkedList<T> list = new LinkedList<T>();
    LOG.debug("Loading all {}-s", model.getName());
    try {
      for (T item : context.getDataStore().findAll(model, filter.whereStatement())) {
        list.add(item);
      }
    } catch (PersistenceException e) {
      throw new ServiceFormattedException("S050 Data storage error", e);
    }
    return list;
  }

  /** Loads all entities owned by the current user. */
  @Override
  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
  }

  /**
   * @throws ItemNotFound when the entity to delete does not exist
   */
  @Override
  public synchronized void delete(Class model, Object id) throws ItemNotFound {
    LOG.debug("Deleting {}:{}", model.getName(), id);
    Object obj = load(model, id);
    try {
      context.getDataStore().remove(obj);
    } catch (PersistenceException e) {
      throw new ServiceFormattedException("S060 Data storage error", e);
    }
  }

  @Override
  public boolean exists(Class model, Object id) {
    try {
      return context.getDataStore().find(model, id) != null;
    } catch (PersistenceException e) {
      throw new ServiceFormattedException("S070 Data storage error", e);
    }
  }
}
package org.apache.ambari.view.hive20.persistence;

/**
 * Factory abstraction for obtaining a {@link Storage} backend, letting
 * callers stay independent of which persistence engine (DataStore,
 * key-value, ...) is in use.
 */
public interface IStorageFactory {
  // Returns the storage engine to persist view entities with.
  Storage getStorage();
}
+ */ + +package org.apache.ambari.view.hive20.persistence; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.hive20.persistence.utils.ContextConfigurationAdapter; +import org.apache.ambari.view.hive20.utils.ServiceFormattedException; +import org.apache.commons.configuration.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.WebApplicationException; + + +/** + * Persistent storage engine for storing java beans to + * instance data + */ +@Deprecated +public class InstanceKeyValueStorage extends KeyValueStorage { + private final static Logger LOG = + LoggerFactory.getLogger(InstanceKeyValueStorage.class); + + private ContextConfigurationAdapter config = null; + private int VALUE_LENGTH_LIMIT = 254; + + /** + * Constructor. + * @param context View Context instance + */ + public InstanceKeyValueStorage(ViewContext context) { + super(context); + } + + /** + * Returns config instance, adapter to Persistence API + * @return config instance + */ + @Override + protected synchronized Configuration getConfig() { + if (config == null) { + config = new ContextConfigurationAdapter(context); + } + return config; + } + + /** + * Value is limited to 256 symbols, this code splits value into chunks and saves them as <key>#<chunk_id> + * @param modelPropName key + * @param json value + */ + protected void write(String modelPropName, String json) { + int saved = 0; + int page = 1; + while (saved < json.length()) { + int end = Math.min(saved + VALUE_LENGTH_LIMIT, json.length()); + String substring = json.substring(saved, end); + getConfig().setProperty(modelPropName + "#" + page, substring); + saved += VALUE_LENGTH_LIMIT; + page += 1; + LOG.debug("Chunk saved: " + modelPropName + "#" + page + "=" + substring); + } + getConfig().setProperty(modelPropName, page - 1); + LOG.debug("Write finished: " + modelPropName + " pages:" + (page - 1)); + } + + /** + * Read chunked value (keys format <key>#<chunk_id>) + * @param 
modelPropName key + * @return value + */ + protected String read(String modelPropName) { + StringBuilder result = new StringBuilder(); + int pages = getConfig().getInt(modelPropName); + LOG.debug("Read started: " + modelPropName + " pages:" + pages); + + for(int page = 1; page <= pages; page++) { + String substring = getConfig().getString(modelPropName + "#" + page); + LOG.debug("Chunk read: " + modelPropName + "#" + page + "=" + substring); + if (substring != null) { + result.append(substring); + } + } + + return result.toString(); + } + + /** + * Remove chunked value (keys format <key>#<chunk_id>) + * @param modelPropName key + */ + protected void clear(String modelPropName) { + int pages = getConfig().getInt(modelPropName); + LOG.debug("Clean started: " + modelPropName + " pages:" + pages); + + for(int page = 1; page <= pages; page++) { + getConfig().clearProperty(modelPropName + "#" + page); + LOG.debug("Chunk clean: " + modelPropName + "#" + page); + } + getConfig().clearProperty(modelPropName); + } + + public static void storageSmokeTest(ViewContext context) { + try { + final String property = "test.smoke.property"; + context.putInstanceData(property, "42"); + boolean status = context.getInstanceData(property).equals("42"); + context.removeInstanceData(property); + if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null); + } catch (WebApplicationException ex) { + throw ex; + } catch (Exception ex) { + throw new ServiceFormattedException(ex.getMessage(), ex); + } + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/KeyValueStorage.java new 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.view.hive20.persistence;

import com.google.gson.Gson;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive20.persistence.utils.FilteringStrategy;
import org.apache.ambari.view.hive20.persistence.utils.Indexed;
import org.apache.ambari.view.hive20.persistence.utils.ItemNotFound;
import org.apache.ambari.view.hive20.persistence.utils.OnlyOwnersFilteringStrategy;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Engine for storing objects to key-value storage.
 * Beans are serialized to JSON and written under "<model>.<id>";
 * a per-model id counter is kept under "<model>:index".
 */
public abstract class KeyValueStorage implements Storage {
  private final static Logger LOG =
      LoggerFactory.getLogger(KeyValueStorage.class);

  protected final Gson gson = new Gson();
  protected ViewContext context;

  /**
   * Constructor
   * @param context View Context instance
   */
  public KeyValueStorage(ViewContext context) {
    this.context = context;
  }

  /**
   * Returns config instance, adapter to Persistence API
   * @return config instance
   */
  protected abstract Configuration getConfig();

  @Override
  public <T extends Indexed> void store(Class<T> model, Indexed obj) {
    String modelIndexingPropName = getIndexPropertyName(model);

    if (obj.getId() == null) {
      // auto-assign the next sequential id from the per-model counter
      int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
      lastIndex++;
      getConfig().setProperty(modelIndexingPropName, lastIndex);
      obj.setId(String.valueOf(lastIndex));
    }

    String modelPropName = getItemPropertyName(model, obj.getId());
    String json = serialize(obj);
    write(modelPropName, json);
  }

  @Override
  public <T extends Indexed> T load(Class<T> model, Object id) throws ItemNotFound {
    String modelPropName = getItemPropertyName(model, id);
    // parameterized logging: no string building unless debug is enabled
    LOG.debug("Loading {}", modelPropName);
    if (getConfig().containsKey(modelPropName)) {
      String json = read(modelPropName);
      LOG.debug("json: {}", json);

      return deserialize(model, json);
    } else {
      throw new ItemNotFound();
    }
  }

  /**
   * Write json to storage
   * @param modelPropName key
   * @param json value
   */
  protected void write(String modelPropName, String json) {
    getConfig().setProperty(modelPropName, json);
  }

  /**
   * Read json from storage
   * @param modelPropName key
   * @return value
   */
  protected String read(String modelPropName) {
    return getConfig().getString(modelPropName);
  }

  /**
   * Remove line from storage
   * @param modelPropName key
   */
  protected void clear(String modelPropName) {
    getConfig().clearProperty(modelPropName);
  }

  protected String serialize(Indexed obj) {
    return gson.toJson(obj);
  }

  protected <T extends Indexed> T deserialize(Class<T> model, String json) {
    return gson.fromJson(json, model);
  }

  @Override
  public synchronized <T extends Indexed> List<T> loadAll(Class<? extends T> model, FilteringStrategy filter) {
    ArrayList<T> list = new ArrayList<T>();
    String modelIndexingPropName = getIndexPropertyName(model);
    LOG.debug("Loading all {}-s", model.getName());
    int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
    // ids are sequential from 1; gaps from deleted items surface as ItemNotFound
    for (int i = 1; i <= lastIndex; i++) {
      try {
        T item = load(model, i);
        if ((filter == null) || filter.isConform(item)) {
          list.add(item);
        }
      } catch (ItemNotFound ignored) {
        // deleted item — skip its id
      }
    }
    return list;
  }

  @Override
  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
  }

  @Override
  public synchronized void delete(Class model, Object id) {
    LOG.debug("Deleting {}:{}", model.getName(), id);
    String modelPropName = getItemPropertyName(model, id);
    clear(modelPropName);
  }

  @Override
  public boolean exists(Class model, Object id) {
    return getConfig().containsKey(getItemPropertyName(model, id));
  }

  // "<model>:index" holds the last id handed out for this model class
  private String getIndexPropertyName(Class model) {
    return String.format("%s:index", model.getName());
  }

  // "<model>.<id>" holds the serialized bean
  private String getItemPropertyName(Class model, Object id) {
    return String.format("%s.%s", model.getName(), id);
  }
}
license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.view.hive20.persistence; + +import org.apache.ambari.view.ViewContext; +import org.apache.ambari.view.hive20.utils.MisconfigurationFormattedException; +import org.apache.commons.configuration.ConfigurationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * Persistent storage engine for storing java beans to + * properties file + * Path to file should be in 'dataworker.storagePath' parameter + */ +@Deprecated +public class LocalKeyValueStorage extends KeyValueStorage { + private final static Logger LOG = + LoggerFactory.getLogger(LocalKeyValueStorage.class); + + private PersistentConfiguration config = null; + + /** + * Constructor + * @param context View Context instance + */ + public LocalKeyValueStorage(ViewContext context) { + super(context); + } + + /** + * Returns config instance + * @return config instance + */ + @Override + protected synchronized PersistentConfiguration getConfig() { + if (config == null) { + String fileName = context.getProperties().get("dataworker.storagePath"); + if (fileName == null) { + String msg = "dataworker.storagePath is not configured!"; + LOG.error(msg); + throw new MisconfigurationFormattedException("dataworker.storagePath"); + } + try { + config = new 
PersistentConfiguration(fileName); + } catch (ConfigurationException e) { + e.printStackTrace(); + } + } + return config; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java new file mode 100644 index 0000000..c9d7bb7 --- /dev/null +++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/persistence/PersistentConfiguration.java @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.hive20.persistence; + +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy; + +import java.io.File; + +/** + * Configuration enables all necessary options for PropertiesConfiguration: + * auto-save, auto-reloading, no delimiter parsing and other + */ +@Deprecated +public class PersistentConfiguration extends PropertiesConfiguration { + /** + * Constructor + * @param fileName path to data file + * @throws ConfigurationException + */ + public PersistentConfiguration(String fileName) throws ConfigurationException { + super(); + + File config = new File(fileName); + setFile(config); + this.setAutoSave(true); + this.setReloadingStrategy(new FileChangedReloadingStrategy()); + this.setDelimiterParsingDisabled(true); + this.setListDelimiter((char) 0); + + if (config.exists()) { + this.load(); + } + } +}
