Author: zshao
Date: Fri Sep 19 17:06:07 2008
New Revision: 697293
URL: http://svn.apache.org/viewvc?rev=697293&view=rev
Log:
HADOOP-4205. Fixed compilation problems.
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/ListTypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/MapTypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoFactory.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl5.q.out
Removed:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/CompositeHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/LabeledCompositeHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/NullHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/PrimitiveHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/exec/TableHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveObjectComparator.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveObjectSerializer.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NaiiveJSONSerializer.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NaiiveSerializer.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagHiveObjectComparator.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagWritableComparableHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/NoTagWritableHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/WritableComparableHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/WritableHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeInfo.java
hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestCompositeHiveObject.java
hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestWritables.java
Modified:
hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
Modified:
hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=697293&r1=697292&r2=697293&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
(original)
+++
hadoop/core/trunk/src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
Fri Sep 19 17:06:07 2008
@@ -82,27 +82,30 @@
e.printStackTrace();
}
- }
- else {
+ } else {
ret = qp.run(cmd);
Vector<Vector<String>> res = new Vector<Vector<String>>();
- while (qp.getResults(res)) {
- SessionState ss = SessionState.get();
- PrintStream out = ss.out;
-
- for (Vector<String> row:res)
+ while (qp.getResults(res))
+ {
+ SessionState ss = SessionState.get();
+ OutputStream out = ss.out;
+ try
{
- boolean firstCol = true;
- for (String col:row)
+
+ for (Vector<String> row:res)
{
- if (!firstCol)
+ for (String col:row)
+ {
+ out.write(col == null ? Utilities.nullStringOutput.getBytes() :
col.getBytes());
out.write(Utilities.tabCode);
- out.print(col == null ? Utilities.nullStringOutput : col);
- firstCol = false;
- }
- out.write(Utilities.newLineCode);
+ }
+ out.write(Utilities.newLineCode);
+ }
+ res.clear();
+
+ } catch (IOException e) {
+ e.printStackTrace();
}
- res.clear();
}
}
return ret;
@@ -148,20 +151,16 @@
SessionState.initHiveLog4j();
CliSessionState ss = new CliSessionState (new
HiveConf(SessionState.class));
- ss.in = System.in;
- try {
- ss.out = new PrintStream(System.out, true, "UTF-8");
- ss.err = new PrintStream(System.err, true, "UTF-8");
- } catch (UnsupportedEncodingException e) {
- System.exit(3);
- }
-
SessionState.start(ss);
if(! oproc.process_stage2(ss)) {
System.exit(2);
}
+ ss.in = System.in;
+ ss.out = System.out;
+ ss.err = System.err;
+
sp = new SetProcessor();
qp = new Driver();
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/io/HiveKey.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.WritableComparator;
+
+/** HiveKey is a simple wrapper on BytesWritable which allows us to set the
hashCode easily.
+ * hashCode is used for hadoop partitioner.
+ */
+public class HiveKey extends BytesWritable {
+
+ private static final int LENGTH_BYTES = 4;
+
+ boolean hashCodeValid;
+ public HiveKey() {
+ hashCodeValid = false;
+ }
+
+ protected int myHashCode;
+ public void setHashCode(int myHashCode) {
+ this.hashCodeValid = true;
+ this.myHashCode = myHashCode;
+ }
+ public int hashCode() {
+ if (!hashCodeValid) {
+ throw new RuntimeException("Cannot get hashCode() from deserialized " +
HiveKey.class);
+ }
+ return myHashCode;
+ }
+
+ /** A Comparator optimized for HiveKey. */
+ public static class Comparator extends WritableComparator {
+ public Comparator() {
+ super(HiveKey.class);
+ }
+
+ /**
+ * Compare the buffers in serialized form.
+ */
+ public int compare(byte[] b1, int s1, int l1,
+ byte[] b2, int s2, int l2) {
+ return compareBytes(b1, s1+LENGTH_BYTES, l1-LENGTH_BYTES,
+ b2, s2+LENGTH_BYTES, l2-LENGTH_BYTES);
+ }
+ }
+
+ static {
+ WritableComparator.define(HiveKey.class, new Comparator());
+ }
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/ListTypeInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/ListTypeInfo.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/ListTypeInfo.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/ListTypeInfo.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.io.Serializable;
+import java.util.List;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+
+/** A List Type has homogeneous elements. All elements of the List have
+ * the same TypeInfo, which is returned by getListElementTypeInfo.
+ *
+ * Always use the TypeInfoFactory to create new TypeInfo objects, instead
+ * of directly creating an instance of this class.
+ */
+public class ListTypeInfo extends TypeInfo implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+ TypeInfo listElementTypeInfo;
+
+ /** For java serialization use only.
+ */
+ public ListTypeInfo() {}
+
+ public String getTypeName() {
+ return org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME
+ + "<" + listElementTypeInfo.getTypeName() + ">";
+ }
+
+ /** For java serialization use only.
+ */
+ public void setListElementTypeInfo(TypeInfo listElementTypeInfo) {
+ this.listElementTypeInfo = listElementTypeInfo;
+ }
+
+ /** For TypeInfoFactory use only.
+ */
+ ListTypeInfo(TypeInfo elementTypeInfo) {
+ this.listElementTypeInfo = elementTypeInfo;
+ }
+
+ public Category getCategory() {
+ return Category.LIST;
+ }
+
+ public TypeInfo getListElementTypeInfo() {
+ return listElementTypeInfo;
+ }
+
+ public boolean equals(Object other) {
+ if (this == other) return true;
+ if (!(other instanceof TypeInfo)) {
+ return false;
+ }
+ TypeInfo o = (TypeInfo) other;
+ return o.getCategory().equals(getCategory())
+ && o.getListElementTypeInfo().equals(getListElementTypeInfo());
+ }
+
+ public int hashCode() {
+ return listElementTypeInfo.hashCode();
+ }
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/MapTypeInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/MapTypeInfo.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/MapTypeInfo.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/MapTypeInfo.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+
+/** A Map Type has homogeneous keys and homogeneous values.
+ * All keys of the Map have the same TypeInfo, which is returned by
+ * getMapKeyTypeInfo(); and all values of the Map have the same TypeInfo,
+ * which is returned by getMapValueTypeInfo().
+ *
+ * Always use the TypeInfoFactory to create new TypeInfo objects, instead
+ * of directly creating an instance of this class.
+ */
+public class MapTypeInfo extends TypeInfo implements Serializable{
+
+ private static final long serialVersionUID = 1L;
+
+ TypeInfo mapKeyTypeInfo;
+ TypeInfo mapValueTypeInfo;
+
+ /** For java serialization use only.
+ */
+ public MapTypeInfo() {}
+
+ public String getTypeName() {
+ return org.apache.hadoop.hive.serde.Constants.MAP_TYPE_NAME
+ + "<" + mapKeyTypeInfo.getTypeName() + ","
+ + mapValueTypeInfo.getTypeName() + ">";
+ }
+
+ /** For java serialization use only.
+ */
+ public void setMapKeyTypeInfo(TypeInfo mapKeyTypeInfo) {
+ this.mapKeyTypeInfo = mapKeyTypeInfo;
+ }
+
+ /** For java serialization use only.
+ */
+ public void setMapValueTypeInfo(TypeInfo mapValueTypeInfo) {
+ this.mapValueTypeInfo = mapValueTypeInfo;
+ }
+
+ // For TypeInfoFactory use only
+ MapTypeInfo(TypeInfo keyTypeInfo, TypeInfo valueTypeInfo) {
+ this.mapKeyTypeInfo = keyTypeInfo;
+ this.mapValueTypeInfo = valueTypeInfo;
+ }
+
+ public Category getCategory() {
+ return Category.MAP;
+ }
+
+ public TypeInfo getMapKeyTypeInfo() {
+ return mapKeyTypeInfo;
+ }
+
+ public TypeInfo getMapValueTypeInfo() {
+ return mapValueTypeInfo;
+ }
+
+ public boolean equals(Object other) {
+ if (this == other) return true;
+ if (!(other instanceof TypeInfo)) {
+ return false;
+ }
+ TypeInfo o = (TypeInfo) other;
+ return o.getCategory().equals(getCategory())
+ && o.getMapKeyTypeInfo().equals(getMapKeyTypeInfo())
+ && o.getMapValueTypeInfo().equals(getMapValueTypeInfo());
+ }
+
+ public int hashCode() {
+ return mapKeyTypeInfo.hashCode() ^ mapValueTypeInfo.hashCode();
+ }
+
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/PrimitiveTypeInfo.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.io.Serializable;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+
+
+/** There are a limited number of Primitive Types.
+ * All Primitive Types are defined by ObjectInspectorUtils.isPrimitiveClass().
+ *
+ * Always use the TypeInfoFactory to create new TypeInfo objects, instead
+ * of directly creating an instance of this class.
+ */
+public class PrimitiveTypeInfo extends TypeInfo implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ Class primitiveClass;
+
+ /** For java serialization use only.
+ */
+ public PrimitiveTypeInfo() {}
+
+ public String getTypeName() {
+ return ObjectInspectorUtils.getClassShortName(primitiveClass.getName());
+ }
+
+
+ /** For java serialization use only.
+ */
+ public void setPrimitiveClass(Class primitiveClass) {
+ this.primitiveClass = primitiveClass;
+ }
+
+ /** For TypeInfoFactory use only.
+ */
+ PrimitiveTypeInfo(Class primitiveClass) {
+ this.primitiveClass = primitiveClass;
+ }
+
+ public Category getCategory() {
+ return Category.PRIMITIVE;
+ }
+
+ public Class getPrimitiveClass() {
+ return primitiveClass;
+ }
+
+ public boolean equals(Object other) {
+ if (this == other) return true;
+ if (!(other instanceof TypeInfo)) {
+ return false;
+ }
+ TypeInfo o = (TypeInfo) other;
+ return o.getCategory().equals(getCategory())
+ && o.getPrimitiveClass().equals(getPrimitiveClass());
+ }
+
+ public int hashCode() {
+ return primitiveClass.hashCode();
+ }
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+
+/** StructTypeInfo represents the TypeInfo of a struct.
+ * A struct contains one or more fields each of which has a unique name
+ * and its own TypeInfo. Different fields can have the same or different
+ * TypeInfo.
+ *
+ * Always use the TypeInfoFactory to create new TypeInfo objects, instead
+ * of directly creating an instance of this class.
+ */
+public class StructTypeInfo extends TypeInfo implements Serializable{
+
+ private static final long serialVersionUID = 1L;
+
+ ArrayList<String> allStructFieldNames;
+ ArrayList<TypeInfo> allStructFieldTypeInfos;
+
+ /** For java serialization use only.
+ */
+ public StructTypeInfo() {}
+
+ public String getTypeName() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("struct{");
+ for(int i=0; i<allStructFieldNames.size(); i++) {
+ if (i>0) sb.append(",");
+ sb.append(allStructFieldNames.get(i));
+ sb.append(":");
+ sb.append(allStructFieldTypeInfos.get(i).getTypeName());
+ }
+ sb.append("}");
+ return sb.toString();
+ }
+
+ /** For java serialization use only.
+ */
+ public void setAllStructFieldNames(ArrayList<String> allStructFieldNames) {
+ this.allStructFieldNames = allStructFieldNames;
+ }
+
+ /** For java serialization use only.
+ */
+ public void setAllStructFieldTypeInfos(
+ ArrayList<TypeInfo> allStructFieldTypeInfos) {
+ this.allStructFieldTypeInfos = allStructFieldTypeInfos;
+ }
+
+ /** For TypeInfoFactory use only.
+ */
+ StructTypeInfo(List<String> names, List<TypeInfo> typeInfos) {
+ assert(allStructFieldNames.size() == typeInfos.size());
+ allStructFieldNames = new ArrayList<String>();
+ allStructFieldNames.addAll(names);
+ allStructFieldTypeInfos = new ArrayList<TypeInfo>();
+ allStructFieldTypeInfos.addAll(typeInfos);
+ }
+
+ public Category getCategory() {
+ return Category.STRUCT;
+ }
+
+ public List<String> getAllStructFieldNames() {
+ return java.util.Collections.unmodifiableList(allStructFieldNames);
+ }
+
+ public List<TypeInfo> getAllStructFieldTypeInfos() {
+ return java.util.Collections.unmodifiableList(allStructFieldTypeInfos);
+ }
+
+ public TypeInfo getStructFieldTypeInfo(String field) {
+ for(int i=0; i<allStructFieldNames.size(); i++) {
+ if (field.equals(allStructFieldNames.get(i))) {
+ return allStructFieldTypeInfos.get(i);
+ }
+ }
+ throw new RuntimeException("cannot find field " + field + " in " +
allStructFieldNames);
+ // return null;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) return true;
+ if (!(other instanceof TypeInfo)) {
+ return false;
+ }
+ TypeInfo o = (TypeInfo) other;
+ return o.getCategory().equals(getCategory())
+ && o.getAllStructFieldNames().equals(getAllStructFieldNames())
+ && o.getAllStructFieldTypeInfos().equals(getAllStructFieldTypeInfos());
+ }
+
+ public int hashCode() {
+ return allStructFieldNames.hashCode() ^ allStructFieldTypeInfos.hashCode();
+ }
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfo.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfo.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfo.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfo.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.io.Serializable;import java.util.List;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+
+/**
+ * Stores information about a type (DDL).
+ * Always use the TypeInfoFactory to create new TypeInfo objects.
+ *
+ * We support 4 categories of types:
+ * 1. Primitive objects (String, Number, etc)
+ * 2. List objects (a list of objects of a single type)
+ * 3. Map objects (a map from objects of one type to objects of another type)
+ * 4. Struct objects (a list of fields with names and their own types)
+ */
+public abstract class TypeInfo implements Serializable {
+
+ protected TypeInfo() {}
+
+ public String getTypeName() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getCategory()");
+ }
+
+ public Category getCategory() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getCategory()");
+ }
+
+ public Class<?> getPrimitiveClass() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getPrimitiveClass()");
+ }
+
+ public TypeInfo getListElementTypeInfo() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getListElementTypeInfo()");
+ }
+
+ public TypeInfo getMapKeyTypeInfo() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getMapKeyTypeInfo()");
+ }
+ public TypeInfo getMapValueTypeInfo() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getMapValueTypeInfo()");
+ }
+
+ public List<String> getAllStructFieldNames() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getAllStructFieldNames()");
+ }
+ public List<TypeInfo> getAllStructFieldTypeInfos() {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getAllStructFieldTypeInfos()");
+ }
+ public TypeInfo getStructFieldTypeInfo(String field) {
+ throw new RuntimeException("Unsupported: " + this.getClass() +
".getStructFieldTypeInfo()");
+ }
+
+ public String toString() {
+ return getTypeName();
+ }
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoFactory.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoFactory.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoFactory.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoFactory.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+
+/**
+ * TypeInfoFactory can be used to create the TypeInfo object for any types.
+ *
+ * TypeInfo objects are all read-only so we can reuse them easily.
TypeInfoFactory
+ * has internal cache to make sure we don't create 2 TypeInfo objects that
represents the
+ * same type.
+ */
+public class TypeInfoFactory {
+
+ static HashMap<Class<?>, TypeInfo> cachedPrimitiveTypeInfo = new
HashMap<Class<?>, TypeInfo>();
+ public static TypeInfo getPrimitiveTypeInfo(Class<?> primitiveClass) {
+ assert(ObjectInspectorUtils.isPrimitiveClass(primitiveClass));
+ primitiveClass = ObjectInspectorUtils.generalizePrimitive(primitiveClass);
+ TypeInfo result = cachedPrimitiveTypeInfo.get(primitiveClass);
+ if (result == null) {
+ result = new PrimitiveTypeInfo(primitiveClass);
+ cachedPrimitiveTypeInfo.put(primitiveClass, result);
+ }
+ return result;
+ }
+
+ static HashMap<ArrayList<List<?>>, TypeInfo> cachedStructTypeInfo = new
HashMap<ArrayList<List<?>>, TypeInfo>();
+ public static TypeInfo getStructTypeInfo(List<String> names, List<TypeInfo>
typeInfos) {
+ ArrayList<List<?>> signature = new ArrayList<List<?>>(2);
+ signature.add(names);
+ signature.add(typeInfos);
+ TypeInfo result = cachedStructTypeInfo.get(signature);
+ if (result == null) {
+ result = new StructTypeInfo(names, typeInfos);
+ cachedStructTypeInfo.put(signature, result);
+ }
+ return result;
+ }
+
+ static HashMap<TypeInfo, TypeInfo> cachedListTypeInfo = new
HashMap<TypeInfo, TypeInfo>();
+ public static TypeInfo getListTypeInfo(TypeInfo elementTypeInfo) {
+ TypeInfo result = cachedListTypeInfo.get(elementTypeInfo);
+ if (result == null) {
+ result = new ListTypeInfo(elementTypeInfo);
+ cachedListTypeInfo.put(elementTypeInfo, result);
+ }
+ return result;
+ }
+
+ static HashMap<ArrayList<TypeInfo>, TypeInfo> cachedMapTypeInfo = new
HashMap<ArrayList<TypeInfo>, TypeInfo>();
+ public static TypeInfo getMapTypeInfo(TypeInfo keyTypeInfo, TypeInfo
valueTypeInfo) {
+ ArrayList<TypeInfo> signature = new ArrayList<TypeInfo>(2);
+ signature.add(keyTypeInfo);
+ signature.add(valueTypeInfo);
+ TypeInfo result = cachedMapTypeInfo.get(signature);
+ if (result == null) {
+ result = new MapTypeInfo(keyTypeInfo, valueTypeInfo);
+ cachedMapTypeInfo.put(signature, result);
+ }
+ return result;
+ }
+
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,112 @@
+package org.apache.hadoop.hive.ql.typeinfo;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+public class TypeInfoUtils {
+
+ static HashMap<TypeInfo, ObjectInspector> cachedStandardObjectInspector =
new HashMap<TypeInfo, ObjectInspector>();
+ /**
+ * Returns the standard object inspector that can be used to translate an
object of that typeInfo
+ * to a standard object type.
+ */
+ public static ObjectInspector
getStandardObjectInspectorFromTypeInfo(TypeInfo typeInfo) {
+ ObjectInspector result = cachedStandardObjectInspector.get(typeInfo);
+ if (result == null) {
+ switch(typeInfo.getCategory()) {
+ case PRIMITIVE: {
+ result =
ObjectInspectorFactory.getStandardPrimitiveObjectInspector(typeInfo.getPrimitiveClass());
+ break;
+ }
+ case LIST: {
+ ObjectInspector elementObjectInspector =
getStandardObjectInspectorFromTypeInfo(typeInfo.getListElementTypeInfo());
+ result =
ObjectInspectorFactory.getStandardListObjectInspector(elementObjectInspector);
+ break;
+ }
+ case MAP: {
+ ObjectInspector keyObjectInspector =
getStandardObjectInspectorFromTypeInfo(typeInfo.getMapKeyTypeInfo());
+ ObjectInspector valueObjectInspector =
getStandardObjectInspectorFromTypeInfo(typeInfo.getMapValueTypeInfo());
+ result =
ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector,
valueObjectInspector);
+ break;
+ }
+ case STRUCT: {
+ List<String> fieldNames = typeInfo.getAllStructFieldNames();
+ List<TypeInfo> fieldTypeInfos =
typeInfo.getAllStructFieldTypeInfos();
+ List<ObjectInspector> fieldObjectInspectors = new
ArrayList<ObjectInspector>(fieldTypeInfos.size());
+ for(int i=0; i<fieldTypeInfos.size(); i++) {
+
fieldObjectInspectors.add(getStandardObjectInspectorFromTypeInfo(fieldTypeInfos.get(i)));
+ }
+ result =
ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames,
fieldObjectInspectors);
+ break;
+ }
+ default: {
+ result = null;
+ }
+ }
+ cachedStandardObjectInspector.put(typeInfo, result);
+ }
+ return result;
+ }
+
+
+ /**
+ * Get the TypeInfo object from the ObjectInspector object by recursively
going into the
+ * ObjectInspector structure.
+ */
+ public static TypeInfo getTypeInfoFromObjectInspector(ObjectInspector oi) {
+// OPTIMIZATION for later.
+// if (oi instanceof TypeInfoBasedObjectInspector) {
+// TypeInfoBasedObjectInspector typeInfoBasedObjectInspector =
(ObjectInspector)oi;
+// return typeInfoBasedObjectInspector.getTypeInfo();
+// }
+
+ // Recursively going into ObjectInspector structure
+ TypeInfo result = null;
+ switch (oi.getCategory()) {
+ case PRIMITIVE: {
+ PrimitiveObjectInspector poi =(PrimitiveObjectInspector)oi;
+ result = TypeInfoFactory.getPrimitiveTypeInfo(poi.getPrimitiveClass());
+ break;
+ }
+ case LIST: {
+ ListObjectInspector loi = (ListObjectInspector)oi;
+ result = TypeInfoFactory.getListTypeInfo(
+
getTypeInfoFromObjectInspector(loi.getListElementObjectInspector()));
+ break;
+ }
+ case MAP: {
+ MapObjectInspector moi = (MapObjectInspector)oi;
+ result = TypeInfoFactory.getMapTypeInfo(
+ getTypeInfoFromObjectInspector(moi.getMapKeyObjectInspector()),
+ getTypeInfoFromObjectInspector(moi.getMapValueObjectInspector()));
+ break;
+ }
+ case STRUCT: {
+ StructObjectInspector soi = (StructObjectInspector)oi;
+ List<? extends StructField> fields = soi.getAllStructFieldRefs();
+ List<String> fieldNames = new ArrayList<String>(fields.size());
+ List<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.size());
+ for(StructField f : fields) {
+ fieldNames.add(f.getFieldName());
+
fieldTypeInfos.add(getTypeInfoFromObjectInspector(f.getFieldObjectInspector()));
+ }
+ result = TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
+ break;
+ }
+ default: {
+ throw new RuntimeException("Unknown ObjectInspector category!");
+ }
+ }
+ return result;
+ }
+
+}
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,10 @@
+-- a simple test of the CLUSTERED BY / SORTED BY ... INTO n BUCKETS syntax
+CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT,
+ page_url STRING, referrer_url STRING,
+ friends ARRAY<BIGINT>, properties MAP<STRING, STRING>,
+ ip STRING COMMENT 'IP Address of the User')
+ COMMENT 'This is the page view table'
+ PARTITIONED BY(ds DATETIME, country STRING)
+ CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS;
+DESCRIBE INPUTDDL4;
+DROP TABLE INPUTDDL4;
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl5.q
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,8 @@
+-- test for internationalization
+-- kv4.txt contains the utf-8 character 0xE982B5E993AE which we are verifying
later on
+CREATE TABLE INPUTDDL5(name STRING);
+LOAD DATA LOCAL INPATH '../data/files/kv4.txt' INTO TABLE INPUTDDL5;
+DESCRIBE INPUTDDL5;
+SELECT INPUTDDL5.name from INPUTDDL5;
+SELECT count(1) FROM INPUTDDL5 WHERE INPUTDDL5.name = _UTF-8 0xE982B5E993AE;
+DROP TABLE INPUTDDL5;
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl4.q.out
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,9 @@
+viewtime datetime
+userid int
+page_url string
+referrer_url string
+friends array<bigint>
+properties map<string,string>
+ip string 'IP Address of the User'
+ds datetime
+country string
Added:
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl5.q.out
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl5.q.out?rev=697293&view=auto
==============================================================================
---
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl5.q.out
(added)
+++
hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/inputddl5.q.out
Fri Sep 19 17:06:07 2008
@@ -0,0 +1,5 @@
+name string
+邵铮
+
+1
+