/**
 * Wraps a byte array so it can serve as a key in hash-based collections:
 * equality and hashing are by array content rather than by reference
 * (raw byte[] uses identity semantics and is unusable as a map key).
 */
public class ByteArrayWrapper {
  byte[] wrapped;

  ByteArrayWrapper(byte[] bytes) {
    this.wrapped = bytes;
  }

  @Override
  public boolean equals(Object other) {
    // Content-based comparison; any non-ByteArrayWrapper (including null) is unequal.
    return other instanceof ByteArrayWrapper
        && Arrays.equals(wrapped, ((ByteArrayWrapper) other).wrapped);
  }

  @Override
  public int hashCode() {
    // Consistent with equals: hash of the array contents.
    return Arrays.hashCode(wrapped);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.metastore.cache;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.cache.CachedStore.PartitionWrapper;
import org.apache.hadoop.hive.metastore.cache.CachedStore.TableWrapper;
import org.apache.hadoop.hive.metastore.utils.StringUtils;

/**
 * Utilities for the metastore object cache: building and parsing the flat
 * string keys under which tables, partitions and column statistics are
 * cached, re-assembling full Table/Partition objects from their cached
 * parts, and name-pattern matching.
 */
public class CacheUtils {
  // Separator between key components. \u0001 is presumed never to occur in
  // database/table names or partition values — TODO confirm against the
  // metastore's identifier validation.
  private static final String delimit = "\u0001";

  /** Key for a table: dbName + separator + tableName. */
  public static String buildKey(String dbName, String tableName) {
    return dbName + delimit + tableName;
  }

  /**
   * Table key with a trailing separator, for use as a prefix when scanning
   * all entries belonging to one table (prevents "tbl" from matching "tbl2").
   */
  public static String buildKeyWithDelimit(String dbName, String tableName) {
    return buildKey(dbName, tableName) + delimit;
  }

  /**
   * Key for a partition: the table key followed by the separator-joined
   * partition values.
   *
   * <p>Fix: a separator is now emitted between the table name and the first
   * partition value. The previous version fused them, so partition keys did
   * not start with {@code buildKeyWithDelimit(dbName, tableName)} (breaking
   * prefix scans) and {@link #splitPartitionColStats(String)} recovered a
   * corrupted table name.
   */
  public static String buildKey(String dbName, String tableName, List<String> partVals) {
    if (partVals == null || partVals.isEmpty()) {
      return buildKey(dbName, tableName);
    }
    // String.join avoids the quadratic += concatenation loop.
    return buildKeyWithDelimit(dbName, tableName) + String.join(delimit, partVals);
  }

  /** Partition key with a trailing separator, for prefix scans. */
  public static String buildKeyWithDelimit(String dbName, String tableName, List<String> partVals) {
    return buildKey(dbName, tableName, partVals) + delimit;
  }

  /** Key for a partition-level column-statistics entry. */
  public static String buildKey(String dbName, String tableName, List<String> partVals, String colName) {
    return buildKey(dbName, tableName, partVals) + delimit + colName;
  }

  /** Key for a table-level column-statistics entry. */
  public static String buildKey(String dbName, String tableName, String colName) {
    return buildKey(dbName, tableName) + delimit + colName;
  }

  /** Splits a table column-statistics key into [dbName, tableName, colName]. */
  public static String[] splitTableColStats(String key) {
    return key.split(delimit);
  }

  /**
   * Splits a partition column-statistics key into
   * {@code [dbName, tableName, List<partVal>, colName]}.
   */
  public static Object[] splitPartitionColStats(String key) {
    Object[] result = new Object[4];
    String[] comps = key.split(delimit);
    result[0] = comps[0];
    result[1] = comps[1];
    // Components 2 .. length-2 (inclusive) are partition values; only the
    // final component is the column name. Fix: the previous upper bound of
    // (length - 2) exclusive silently dropped the last partition value.
    List<String> vals = new ArrayList<>();
    for (int i = 2; i < comps.length - 1; i++) {
      vals.add(comps[i]);
    }
    result[2] = vals;
    result[3] = comps[comps.length - 1];
    return result;
  }

  /**
   * Rebuilds a full Table from its cached wrapper: deep-copies the cached
   * pieces, re-attaches the shared StorageDescriptor (deduplicated by hash in
   * SharedCache), restores the per-table location/parameters, and replaces
   * null collection fields with empty ones so callers never see nulls.
   */
  static Table assemble(TableWrapper wrapper, SharedCache sharedCache) {
    Table t = wrapper.getTable().deepCopy();
    if (wrapper.getSdHash() != null) {
      StorageDescriptor sdCopy = sharedCache.getSdFromCache(wrapper.getSdHash()).deepCopy();
      if (sdCopy.getBucketCols() == null) {
        sdCopy.setBucketCols(new ArrayList<>());
      }
      if (sdCopy.getSortCols() == null) {
        sdCopy.setSortCols(new ArrayList<>());
      }
      if (sdCopy.getSkewedInfo() == null) {
        sdCopy.setSkewedInfo(new SkewedInfo(new ArrayList<>(),
            new ArrayList<>(), new HashMap<>()));
      }
      sdCopy.setLocation(wrapper.getLocation());
      sdCopy.setParameters(wrapper.getParameters());
      t.setSd(sdCopy);
    }
    return t;
  }

  /**
   * Rebuilds a full Partition from its cached wrapper; mirrors
   * {@link #assemble(TableWrapper, SharedCache)} for partitions.
   */
  static Partition assemble(PartitionWrapper wrapper, SharedCache sharedCache) {
    Partition p = wrapper.getPartition().deepCopy();
    if (wrapper.getSdHash() != null) {
      StorageDescriptor sdCopy = sharedCache.getSdFromCache(wrapper.getSdHash()).deepCopy();
      if (sdCopy.getBucketCols() == null) {
        sdCopy.setBucketCols(new ArrayList<>());
      }
      if (sdCopy.getSortCols() == null) {
        sdCopy.setSortCols(new ArrayList<>());
      }
      if (sdCopy.getSkewedInfo() == null) {
        sdCopy.setSkewedInfo(new SkewedInfo(new ArrayList<>(),
            new ArrayList<>(), new HashMap<>()));
      }
      sdCopy.setLocation(wrapper.getLocation());
      sdCopy.setParameters(wrapper.getParameters());
      p.setSd(sdCopy);
    }
    return p;
  }

  /**
   * Case-insensitive match of a name against a '|'-separated list of
   * glob-style subpatterns, where '?' matches one character and '*' matches
   * any run; literal '^' and '$' are escaped. Returns true if any subpattern
   * matches the normalized name.
   */
  public static boolean matches(String name, String pattern) {
    String[] subpatterns = pattern.trim().split("\\|");
    for (String subpattern : subpatterns) {
      // NOTE(review): '^'/'$' are escaped after '*'/'?' expansion, so a
      // pattern containing them matches those characters literally.
      subpattern = "(?i)" + subpattern.replaceAll("\\?", ".{1}").replaceAll("\\*", ".*")
          .replaceAll("\\^", "\\\\^").replaceAll("\\$", "\\\\$");
      if (Pattern.matches(subpattern, StringUtils.normalizeIdentifier(name))) {
        return true;
      }
    }
    return false;
  }
}
