Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveRow.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveRow.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveRow.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveRow.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.hive.accumulo; import java.io.DataInput;
Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo; import java.io.IOException; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package org.apache.hadoop.hive.accumulo; import java.util.ArrayList; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableInputFormat.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableInputFormat.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableInputFormat.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableInputFormat.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.mr; import java.io.IOException; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate; import java.util.ArrayList; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate; import java.io.IOException; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/CompareOp.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/CompareOp.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/CompareOp.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/CompareOp.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/DoubleCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/DoubleCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/DoubleCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/DoubleCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import java.math.BigDecimal; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Equal.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Equal.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Equal.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Equal.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThan.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThan.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThan.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThan.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThanOrEqual.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThanOrEqual.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThanOrEqual.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/GreaterThanOrEqual.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/IntCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/IntCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/IntCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/IntCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import java.nio.ByteBuffer; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThan.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThan.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThan.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThan.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThanOrEqual.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThanOrEqual.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThanOrEqual.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LessThanOrEqual.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Like.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Like.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Like.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/Like.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LongCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LongCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LongCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/LongCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import java.nio.ByteBuffer; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/NotEqual.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/NotEqual.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/NotEqual.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/NotEqual.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/PrimitiveComparison.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/PrimitiveComparison.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/PrimitiveComparison.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/PrimitiveComparison.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; /** Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import java.util.regex.Pattern; Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java (original) +++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.serde; import java.io.IOException; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableInputFormat.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableInputFormat.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableInputFormat.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableInputFormat.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package org.apache.hadoop.hive.accumulo.mr; import static org.junit.Assert.assertArrayEquals; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate; import static org.junit.Assert.assertArrayEquals; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestDoubleCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package org.apache.hadoop.hive.accumulo.predicate.compare; import static org.junit.Assert.assertEquals; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestIntCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package org.apache.hadoop.hive.accumulo.predicate.compare; import static org.junit.Assert.assertEquals; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestLongComparison.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import static org.junit.Assert.assertEquals; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/compare/TestStringCompare.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.predicate.compare; import static org.junit.Assert.assertFalse; Modified: hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java (original) +++ hive/branches/spark/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.accumulo.serde; import static org.junit.Assert.assertEquals; Modified: hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java URL: http://svn.apache.org/viewvc/hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java (original) +++ hive/branches/spark/beeline/src/java/org/apache/hive/beeline/Commands.java Tue Nov 18 00:48:40 2014 @@ -725,7 +725,7 @@ public class Commands { String extra = beeLine.getConsoleReader().readLine(prompt.toString()); if (!beeLine.isComment(extra)) { - line += " " + extra; + line += "\n" + extra; } } } catch (Exception e) { Modified: hive/branches/spark/bin/ext/beeline.sh URL: http://svn.apache.org/viewvc/hive/branches/spark/bin/ext/beeline.sh?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/bin/ext/beeline.sh (original) +++ hive/branches/spark/bin/ext/beeline.sh Tue Nov 18 00:48:40 2014 @@ -25,7 +25,8 @@ beeline () { superCsvJarPath=`ls ${HIVE_LIB}/super-csv-*.jar` jlineJarPath=`ls ${HIVE_LIB}/jline-*.jar` jdbcStandaloneJarPath=`ls ${HIVE_LIB}/hive-jdbc-*-standalone.jar` - export HADOOP_CLASSPATH=${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath} + export HADOOP_CLASSPATH=${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath} + export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties " exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@" } Modified: hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java URL: 
http://svn.apache.org/viewvc/hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original) +++ hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Tue Nov 18 00:48:40 2014 @@ -68,10 +68,7 @@ import org.apache.hadoop.hive.ql.process import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; -import org.apache.hadoop.hive.service.HiveClient; -import org.apache.hadoop.hive.service.HiveServerException; import org.apache.hadoop.io.IOUtils; -import org.apache.thrift.TException; import sun.misc.Signal; import sun.misc.SignalHandler; @@ -150,50 +147,7 @@ public class CliDriver { stringifyException(e)); ret = 1; } - } else if (ss.isRemoteMode()) { // remote mode -- connecting to remote hive server - HiveClient client = ss.getClient(); - PrintStream out = ss.out; - PrintStream err = ss.err; - - try { - client.execute(cmd_trimmed); - List<String> results; - do { - results = client.fetchN(LINES_TO_FETCH); - for (String line : results) { - out.println(line); - } - } while (results.size() == LINES_TO_FETCH); - } catch (HiveServerException e) { - ret = e.getErrorCode(); - if (ret != 0) { // OK if ret == 0 -- reached the EOF - String errMsg = e.getMessage(); - if (errMsg == null) { - errMsg = e.toString(); - } - ret = e.getErrorCode(); - err.println("[Hive Error]: " + errMsg); - } - } catch (TException e) { - String errMsg = e.getMessage(); - if (errMsg == null) { - errMsg = e.toString(); - } - ret = -10002; - err.println("[Thrift Error]: " + errMsg); - } finally { - try { - client.clean(); - } catch (TException e) { - String errMsg = e.getMessage(); - if (errMsg == null) { - errMsg = e.toString(); - } - 
err.println("[Thrift Error]: Hive server is not cleaned due to thrift exception: " - + errMsg); - } - } - } else { // local mode + } else { // local mode try { CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf); ret = processLocalCmd(cmd, proc, ss); @@ -695,31 +649,6 @@ public class CliDriver { private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc) throws Exception { - // connect to Hive Server - if (ss.getHost() != null) { - ss.connect(); - if (ss.isRemoteMode()) { - prompt = "[" + ss.host + ':' + ss.port + "] " + prompt; - char[] spaces = new char[prompt.length()]; - Arrays.fill(spaces, ' '); - prompt2 = new String(spaces); - } - } - - // CLI remote mode is a thin client: only load auxJars in local mode - if (!ss.isRemoteMode()) { - // hadoop-20 and above - we need to augment classpath using hiveconf - // components - // see also: code in ExecDriver.java - ClassLoader loader = conf.getClassLoader(); - String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS); - if (StringUtils.isNotBlank(auxJars)) { - loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ",")); - } - conf.setClassLoader(loader); - Thread.currentThread().setContextClassLoader(loader); - } - CliDriver cli = new CliDriver(); cli.setHiveVariables(oproc.getHiveVariables()); Modified: hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java (original) +++ hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java Tue Nov 18 00:48:40 2014 @@ -25,13 +25,6 @@ import java.util.Properties; import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.service.HiveClient; -import org.apache.thrift.TException; -import org.apache.thrift.protocol.TBinaryProtocol; -import org.apache.thrift.protocol.TProtocol; -import org.apache.thrift.transport.TSocket; -import org.apache.thrift.transport.TTransport; -import org.apache.thrift.transport.TTransportException; /** * SessionState for hive cli. @@ -63,66 +56,16 @@ public class CliSessionState extends Ses */ public List<String> initFiles = new ArrayList<String>(); - /** - * host name and port number of remote Hive server - */ - protected String host; - protected int port; - - private boolean remoteMode; - - private TTransport transport; - private HiveClient client; - public CliSessionState(HiveConf conf) { super(conf); - remoteMode = false; - } - - /** - * Connect to Hive Server - */ - public void connect() throws TTransportException { - transport = new TSocket(host, port); - TProtocol protocol = new TBinaryProtocol(transport); - client = new HiveClient(protocol); - transport.open(); - remoteMode = true; - } - - public void setHost(String host) { - this.host = host; - } - - public String getHost() { - return host; - } - - public int getPort() { - return port; } @Override public void close() { try { super.close(); - if (remoteMode) { - client.clean(); - transport.close(); - } } catch (IOException ioe) { ioe.printStackTrace(); - } catch (TException e) { - e.printStackTrace(); - } + } } - - public boolean isRemoteMode() { - return remoteMode; - } - - public HiveClient getClient() { - return client; - } - } Modified: hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- 
hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java (original) +++ hive/branches/spark/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java Tue Nov 18 00:48:40 2014 @@ -83,20 +83,6 @@ public class OptionsProcessor { .withDescription("Use value for given property") .create()); - // -h hostname/ippaddress - options.addOption(OptionBuilder - .hasArg() - .withArgName("hostname") - .withDescription("connecting to Hive Server on remote host") - .create('h')); - - // -p port - options.addOption(OptionBuilder - .hasArg() - .withArgName("port") - .withDescription("connecting to Hive Server on port number") - .create('p')); - // Substitution option -d, --define options.addOption(OptionBuilder .withValueSeparator() @@ -169,10 +155,6 @@ public class OptionsProcessor { ss.setIsVerbose(commandLine.hasOption('v')); - ss.host = (String) commandLine.getOptionValue('h'); - - ss.port = Integer.parseInt((String) commandLine.getOptionValue('p', "10000")); - String[] initFiles = commandLine.getOptionValues('i'); if (null != initFiles) { ss.initFiles = Arrays.asList(initFiles); Modified: hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java (original) +++ hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java Tue Nov 18 00:48:40 2014 @@ -56,10 +56,7 @@ import org.apache.hadoop.hive.metastore. 
import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; -import org.apache.hadoop.hive.service.HiveClient; -import org.apache.hadoop.hive.service.HiveServerException; import org.apache.hadoop.util.Shell; -import org.apache.thrift.TException; // Cannot call class TestCliDriver since that's the name of the generated @@ -231,66 +228,6 @@ public class TestCliDriverMethods extend } - /** - * test remote execCommand - */ - public void testRemoteCall() throws Exception { - MyCliSessionState ss = new MyCliSessionState(new HiveConf(), - org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_OK); - ss.err = System.err; - ByteArrayOutputStream data = new ByteArrayOutputStream(); - ss.out = new PrintStream(data); - MyCliSessionState.start(ss); - - CliDriver cliDriver = new CliDriver(); - cliDriver.processCmd("remote command"); - assertTrue(data.toString().contains("test result")); - - } - - /** - * test remote Exception - */ - public void testServerException() throws Exception { - MyCliSessionState ss = new MyCliSessionState( - new HiveConf(), - org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_SERVER_EXCEPTION); - ByteArrayOutputStream data = new ByteArrayOutputStream(); - ss.err = new PrintStream(data); - ss.out = System.out; - MyCliSessionState.start(ss); - - CliDriver cliDriver = new CliDriver(); - cliDriver.processCmd("remote command"); - assertTrue(data.toString().contains("[Hive Error]: test HiveServerException")); - data.reset(); - - - } - - /** - * test remote Exception - */ - public void testServerTException() throws Exception { - MyCliSessionState ss = new MyCliSessionState( - new HiveConf(), - org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_T_EXCEPTION); - ByteArrayOutputStream data = new ByteArrayOutputStream(); - ss.err = new PrintStream(data); - 
ss.out = System.out; - MyCliSessionState.start(ss); - - CliDriver cliDriver = new CliDriver(); - cliDriver.processCmd("remote command"); - assertTrue(data.toString().contains("[Thrift Error]: test TException")); - assertTrue(data.toString().contains( - "[Thrift Error]: Hive server is not cleaned due to thrift exception: test TException")); - - } - - /** - * test remote Exception - */ public void testProcessSelectDatabase() throws Exception { CliSessionState sessinState = new CliSessionState(new HiveConf()); CliSessionState.start(sessinState); @@ -521,63 +458,4 @@ public class TestCliDriverMethods extend return status; } } - - private static class MyCliSessionState extends CliSessionState { - - public enum ClientResult { - RETURN_OK, RETURN_SERVER_EXCEPTION, RETURN_T_EXCEPTION - }; - - private final ClientResult result; - - public MyCliSessionState(HiveConf conf, ClientResult result) { - super(conf); - this.result = result; - } - - @Override - public boolean isRemoteMode() { - return true; - } - - @Override - public HiveClient getClient() { - - HiveClient result = mock(HiveClient.class); - if (ClientResult.RETURN_OK.equals(this.result)) { - List<String> fetchResult = new ArrayList<String>(1); - fetchResult.add("test result"); - try { - when(result.fetchN(anyInt())).thenReturn(fetchResult); - } catch (HiveServerException e) { - } catch (Exception e) { - } - } else if (ClientResult.RETURN_SERVER_EXCEPTION.equals(this.result)) { - HiveServerException exception = new HiveServerException("test HiveServerException", 10, - "sql state"); - try { - when(result.fetchN(anyInt())).thenThrow(exception); - - when(result.fetchN(anyInt())).thenThrow(exception); - } catch (TException e) { - ; - } - return result; - } else if (ClientResult.RETURN_T_EXCEPTION.equals(this.result)) { - TException exception = new TException("test TException"); - try { - // org.mockito.Mockito. 
- doThrow(exception).when(result).clean(); - when(result.fetchN(anyInt())).thenThrow(exception); - } catch (TException e) { - e.printStackTrace(); - } - return result; - } - return result; - } - - } - - } Modified: hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java (original) +++ hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java Tue Nov 18 00:48:40 2014 @@ -18,21 +18,10 @@ package org.apache.hadoop.hive.cli; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.InetAddress; -import java.net.ServerSocket; -import java.net.Socket; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.session.SessionState; -import org.junit.AfterClass; -import org.junit.BeforeClass; import org.junit.Test; /** @@ -40,46 +29,6 @@ import org.junit.Test; */ public class TestCliSessionState { - private static TCPServer server; - private static String command = null; - - @BeforeClass - public static void start() throws Exception { - // start fake server - server = new TCPServer(); - Thread thread = new Thread(server); - thread.start(); - // wait for start server; - while (server.getPort() == 0) { - Thread.sleep(20); - } - } - - @AfterClass - public static void stop() throws IOException { - server.stop(); - } - - /** - * test CliSessionState for remote - */ - @Test - public void testConnect() throws Exception { - CliSessionState 
sessionState = new CliSessionState(new HiveConf()); - sessionState.port = server.getPort(); - sessionState.setHost(InetAddress.getLocalHost().getHostName()); - // check connect - sessionState.connect(); - assertTrue(sessionState.isRemoteMode()); - assertEquals(server.getPort(), sessionState.getPort()); - assertEquals(InetAddress.getLocalHost().getHostName(), sessionState.getHost()); - assertNotNull(sessionState.getClient()); - sessionState.close(); - // close should send command clean - assertEquals(command, "clean"); - - } - /** * test default db name */ @@ -89,44 +38,4 @@ public class TestCliSessionState { assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME, SessionState.get().getCurrentDatabase()); } - - /** - * fake hive server - */ - private static class TCPServer implements Runnable { - private int port = 0; - private boolean stop = false; - private ServerSocket welcomeSocket; - - public void run() { - try { - - welcomeSocket = new ServerSocket(0); - port = welcomeSocket.getLocalPort(); - while (!stop) { - byte[] buffer = new byte[512]; - Socket connectionSocket = welcomeSocket.accept(); - InputStream input = connectionSocket.getInputStream(); - OutputStream output = connectionSocket.getOutputStream(); - int read = input.read(buffer); - // command without service bytes - command = new String(buffer, 8, read - 13); - // send derived - output.write(buffer, 0, read); - } - } catch (IOException e) { - ; - } - - } - - public int getPort() { - return port; - } - - public void stop() throws IOException { - stop = true; - welcomeSocket.close(); - } - } } Modified: hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- 
hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java (original) +++ hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java Tue Nov 18 00:48:40 2014 @@ -41,8 +41,7 @@ public class TestOptionsProcessor { System.clearProperty("hivevar"); assertNull(System.getProperty("_A")); String[] args = { "-hiveconf", "_A=B", "-define", "C=D", "-hivevar", "X=Y", - "-S", "true", "-database", "testDb", "-e", "execString", "-v", "true", - "-h", "yahoo.host", "-p", "3000"}; + "-S", "true", "-database", "testDb", "-e", "execString", "-v", "true"}; // stage 1 assertTrue(processor.process_stage1(args)); @@ -55,8 +54,6 @@ public class TestOptionsProcessor { processor.process_stage2(sessionState); assertEquals("testDb", sessionState.database); assertEquals("execString", sessionState.execString); - assertEquals("yahoo.host", sessionState.host); - assertEquals(3000, sessionState.port); assertEquals(0, sessionState.initFiles.size()); assertTrue(sessionState.getIsVerbose()); sessionState.setConf(null); Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java (original) +++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java Tue Nov 18 00:48:40 2014 @@ -103,13 +103,6 @@ public final class Decimal128 extends Nu private short scale; /** - * This is the actual scale detected from the value passed to this Decimal128. - * The value is always equals or less than #scale. It is used to return the correct - * decimal string from {@link #getHiveDecimalString()}. 
- */ - private short actualScale; - - /** * -1 means negative, 0 means zero, 1 means positive. * * @serial @@ -134,7 +127,6 @@ public final class Decimal128 extends Nu this.unscaledValue = new UnsignedInt128(); this.scale = 0; this.signum = 0; - this.actualScale = 0; } /** @@ -147,7 +139,6 @@ public final class Decimal128 extends Nu this.unscaledValue = new UnsignedInt128(o.unscaledValue); this.scale = o.scale; this.signum = o.signum; - this.actualScale = o.actualScale; } /** @@ -187,7 +178,6 @@ public final class Decimal128 extends Nu checkScaleRange(scale); this.unscaledValue = new UnsignedInt128(unscaledVal); this.scale = scale; - this.actualScale = scale; if (unscaledValue.isZero()) { this.signum = 0; } else { @@ -274,7 +264,6 @@ public final class Decimal128 extends Nu this.unscaledValue.update(o.unscaledValue); this.scale = o.scale; this.signum = o.signum; - this.actualScale = o.actualScale; return this; } @@ -303,7 +292,7 @@ public final class Decimal128 extends Nu /** * Update the value of this object with the given {@code long} with the given - * scale. + * scal. * * @param val * {@code long} value to be set to {@code Decimal128}. @@ -325,8 +314,6 @@ public final class Decimal128 extends Nu if (scale != 0) { changeScaleDestructive(scale); } - // set actualScale to 0 because there is no fractional digits on integer values - this.actualScale = 0; return this; } @@ -354,11 +341,6 @@ public final class Decimal128 extends Nu checkScaleRange(scale); this.scale = scale; - // Obtains the scale of the double value to keep a record of the original - // scale. This will be used to print the HiveDecimal string with the - // correct value scale. - this.actualScale = (short) BigDecimal.valueOf(val).scale(); - // Translate the double into sign, exponent and significand, according // to the formulae in JLS, Section 20.10.22. 
long valBits = Double.doubleToLongBits(val); @@ -382,10 +364,6 @@ public final class Decimal128 extends Nu exponent++; } - // Calculate the real number of fractional digits from the double value - this.actualScale -= (exponent > 0) ? exponent : 0; - this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale; - // so far same as java.math.BigDecimal, but the scaling below is // specific to ANSI SQL Numeric. @@ -448,7 +426,6 @@ public final class Decimal128 extends Nu public Decimal128 update(IntBuffer buf, int precision) { int scaleAndSignum = buf.get(); this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update(buf, precision); assert ((signum == 0) == unscaledValue.isZero()); @@ -465,7 +442,6 @@ public final class Decimal128 extends Nu public Decimal128 update128(IntBuffer buf) { int scaleAndSignum = buf.get(); this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update128(buf); assert ((signum == 0) == unscaledValue.isZero()); @@ -482,7 +458,6 @@ public final class Decimal128 extends Nu public Decimal128 update96(IntBuffer buf) { int scaleAndSignum = buf.get(); this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update96(buf); assert ((signum == 0) == unscaledValue.isZero()); @@ -499,7 +474,6 @@ public final class Decimal128 extends Nu public Decimal128 update64(IntBuffer buf) { int scaleAndSignum = buf.get(); this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update64(buf); assert ((signum == 0) == unscaledValue.isZero()); @@ -516,7 +490,6 @@ public final class Decimal128 extends Nu public Decimal128 update32(IntBuffer buf) { int scaleAndSignum = buf.get(); this.scale = (short) (scaleAndSignum >> 
16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update32(buf); assert ((signum == 0) == unscaledValue.isZero()); @@ -537,7 +510,6 @@ public final class Decimal128 extends Nu public Decimal128 update(int[] array, int offset, int precision) { int scaleAndSignum = array[offset]; this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update(array, offset + 1, precision); return this; @@ -555,7 +527,6 @@ public final class Decimal128 extends Nu public Decimal128 update128(int[] array, int offset) { int scaleAndSignum = array[offset]; this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update128(array, offset + 1); return this; @@ -573,7 +544,6 @@ public final class Decimal128 extends Nu public Decimal128 update96(int[] array, int offset) { int scaleAndSignum = array[offset]; this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update96(array, offset + 1); return this; @@ -591,7 +561,6 @@ public final class Decimal128 extends Nu public Decimal128 update64(int[] array, int offset) { int scaleAndSignum = array[offset]; this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update64(array, offset + 1); return this; @@ -609,7 +578,6 @@ public final class Decimal128 extends Nu public Decimal128 update32(int[] array, int offset) { int scaleAndSignum = array[offset]; this.scale = (short) (scaleAndSignum >> 16); - this.actualScale = this.scale; this.signum = (byte) (scaleAndSignum & 0xFF); this.unscaledValue.update32(array, offset + 1); return this; @@ -632,6 +600,7 @@ public final class Decimal128 extends Nu * @param scale */ public Decimal128 update(BigInteger bigInt, 
short scale) { + this.scale = scale; this.signum = (byte) bigInt.compareTo(BigInteger.ZERO); if (signum == 0) { update(0); @@ -640,9 +609,6 @@ public final class Decimal128 extends Nu } else { unscaledValue.update(bigInt); } - this.scale = scale; - this.actualScale = scale; - return this; } @@ -765,9 +731,6 @@ public final class Decimal128 extends Nu this.unscaledValue.addDestructive(accumulated); } - this.actualScale = (short) (fractionalDigits - exponent); - this.actualScale = (this.actualScale < 0) ? 0 : this.actualScale; - int scaleAdjust = scale - fractionalDigits + exponent; if (scaleAdjust > 0) { this.unscaledValue.scaleUpTenDestructive((short) scaleAdjust); @@ -961,7 +924,6 @@ public final class Decimal128 extends Nu this.unscaledValue.scaleUpTenDestructive((short) -scaleDown); } this.scale = scale; - this.actualScale = scale; this.unscaledValue.throwIfExceedsTenToThirtyEight(); } @@ -1163,7 +1125,6 @@ public final class Decimal128 extends Nu if (this.signum == 0 || right.signum == 0) { this.zeroClear(); this.scale = newScale; - this.actualScale = newScale; return; } @@ -1193,7 +1154,6 @@ public final class Decimal128 extends Nu } this.scale = newScale; - this.actualScale = newScale; this.signum = (byte) (this.signum * right.signum); if (this.unscaledValue.isZero()) { this.signum = 0; // because of scaling down, this could happen @@ -1284,7 +1244,6 @@ public final class Decimal128 extends Nu } if (this.signum == 0) { this.scale = newScale; - this.actualScale = newScale; remainder.update(this); return; } @@ -1312,7 +1271,6 @@ public final class Decimal128 extends Nu } this.scale = newScale; - this.actualScale = newScale; this.signum = (byte) (this.unscaledValue.isZero() ? 0 : (this.signum * right.signum)); remainder.scale = scale; @@ -1773,13 +1731,17 @@ public final class Decimal128 extends Nu private int [] tmpArray = new int[2]; /** - * Returns the string representation of this value. 
It returns the original - * {@code actualScale} fractional part when this value was created. However, + * Returns the string representation of this value. It discards the trailing zeros + * in the fractional part to match the HiveDecimal's string representation. However, * don't use this string representation for the reconstruction of the object. * * @return string representation of this value */ public String getHiveDecimalString() { + if (this.signum == 0) { + return "0"; + } + StringBuilder buf = new StringBuilder(50); if (this.signum < 0) { buf.append('-'); } @@ -1790,40 +1752,32 @@ public final class Decimal128 extends Nu int trailingZeros = tmpArray[1]; int numIntegerDigits = unscaledLength - this.scale; if (numIntegerDigits > 0) { + // write out integer part first // then write out fractional part for (int i=0; i < numIntegerDigits; i++) { buf.append(unscaled[i]); } - if (this.actualScale > 0) { + if (this.scale > trailingZeros) { buf.append('.'); - - if (trailingZeros > this.actualScale) { - for (int i=0; i < (trailingZeros - this.scale); i++) { - buf.append("0"); - } - } - - for (int i = numIntegerDigits; i < (numIntegerDigits + this.actualScale); i++) { + for (int i = numIntegerDigits; i < (unscaledLength - trailingZeros); i++) { buf.append(unscaled[i]); } } } else { + // no integer part buf.append('0'); - if (this.actualScale > 0) { + if (this.scale > trailingZeros) { + // fractional part, possibly starting with leading zeros buf.append('.'); - - if (this.actualScale > trailingZeros) { - for (int i = unscaledLength; i < this.scale; ++i) { - buf.append('0'); - } + for (int i = unscaledLength; i < this.scale; ++i) { + buf.append('0'); } - - for (int i = 0; i < (numIntegerDigits + this.actualScale); i++) { + for (int i = 0; i < (unscaledLength - trailingZeros); i++) { buf.append(unscaled[i]); } } @@ -1882,10 +1836,9 @@ public final class Decimal128 extends Nu @Override public String toString() { - return toFormalString() + "(Decimal128: scale=" + scale + ", actualScale=" 
- + this.actualScale + ", signum=" + signum + ", BigDecimal.toString=" - + toBigDecimal().toString() + ", unscaledValue=[" + unscaledValue.toString() - + "])"; + return toFormalString() + "(Decimal128: scale=" + scale + ", signum=" + + signum + ", BigDecimal.toString=" + toBigDecimal().toString() + + ", unscaledValue=[" + unscaledValue.toString() + "])"; } /** @@ -2003,7 +1956,6 @@ public final class Decimal128 extends Nu */ public Decimal128 fastUpdateFromInternalStorage(byte[] internalStorage, short scale) { this.scale = scale; - this.actualScale = scale; this.signum = this.unscaledValue.fastUpdateFromInternalStorage(internalStorage); return this; Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original) +++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Tue Nov 18 00:48:40 2014 @@ -30,6 +30,7 @@ import java.math.RoundingMode; public class HiveDecimal implements Comparable<HiveDecimal> { public static final int MAX_PRECISION = 38; public static final int MAX_SCALE = 38; + /** * Default precision/scale when user doesn't specify in the column metadata, such as * decimal and decimal(8). 
@@ -112,7 +113,7 @@ public class HiveDecimal implements Comp @Override public int hashCode() { - return trim(bd).hashCode(); + return bd.hashCode(); } @Override @@ -168,7 +169,7 @@ public class HiveDecimal implements Comp } public HiveDecimal multiply(HiveDecimal dec) { - return create(bd.multiply(dec.bd), true); + return create(bd.multiply(dec.bd), false); } public BigInteger unscaledValue() { @@ -201,7 +202,7 @@ public class HiveDecimal implements Comp } public HiveDecimal divide(HiveDecimal dec) { - return create(trim(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP)), true); + return create(bd.divide(dec.bd, MAX_SCALE, RoundingMode.HALF_UP), true); } /** @@ -231,6 +232,8 @@ public class HiveDecimal implements Comp return null; } + bd = trim(bd); + int intDigits = bd.precision() - bd.scale(); if (intDigits > MAX_PRECISION) { @@ -241,6 +244,8 @@ public class HiveDecimal implements Comp if (bd.scale() > maxScale ) { if (allowRounding) { bd = bd.setScale(maxScale, RoundingMode.HALF_UP); + // Trimming is again necessary, because rounding may introduce new trailing 0's. + bd = trim(bd); } else { bd = null; } @@ -254,6 +259,8 @@ public class HiveDecimal implements Comp return null; } + bd = trim(bd); + if (bd.scale() > maxScale) { bd = bd.setScale(maxScale, RoundingMode.HALF_UP); } Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original) +++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Nov 18 00:48:40 2014 @@ -519,6 +519,11 @@ public class HiveConf extends Configurat "work for all queries on your datastore. 
If all SQL queries fail (for example, your\n" + "metastore is backed by MongoDB), you might want to disable this to save the\n" + "try-and-fall-back cost."), + METASTORE_DIRECT_SQL_PARTITION_BATCH_SIZE("hive.metastore.direct.sql.batch.size", 0, + "Batch size for partition and other object retrieval from the underlying DB in direct\n" + + "SQL. For some DBs like Oracle and MSSQL, there are hardcoded or perf-based limitations\n" + + "that necessitate this. For DBs that can handle the queries, this isn't necessary and\n" + + "may impede performance. -1 means no batching, 0 means automatic batching."), METASTORE_TRY_DIRECT_SQL_DDL("hive.metastore.try.direct.sql.ddl", true, "Same as hive.metastore.try.direct.sql, for read statements within a transaction that\n" + "modifies metastore data. Due to non-standard behavior in Postgres, if a direct SQL\n" + @@ -565,6 +570,8 @@ public class HiveConf extends Configurat METASTORE_PART_INHERIT_TBL_PROPS("hive.metastore.partition.inherit.table.properties", "", "List of comma separated keys occurring in table properties which will get inherited to newly created partitions. \n" + "* implies all the keys will get inherited."), + METASTORE_FILTER_HOOK("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl", + "Metastore hook class for filtering the metadata read results"), // Parameters for exporting metadata on table drop (requires the use of the) // org.apache.hadoop.hive.ql.parse.MetaDataExportListener preevent listener @@ -1487,6 +1494,9 @@ public class HiveConf extends Configurat "An example like \"select,drop\" will grant select and drop privilege to the owner\n" + "of the table. 
Note that the default gives the creator of a table no access to the\n" + "table (but see HIVE-8067)."), + HIVE_AUTHORIZATION_TASK_FACTORY("hive.security.authorization.task.factory", + "org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl", + "Authorization DDL task factory implementation"), // if this is not set default value is set during config initialization // Default value can't be set in this constructor as it would refer names in other ConfVars @@ -1539,12 +1549,13 @@ public class HiveConf extends Configurat // operation log configuration HIVE_SERVER2_LOGGING_OPERATION_ENABLED("hive.server2.logging.operation.enabled", true, - "When true, HS2 will save operation logs"), + "When true, HS2 will save operation logs and make them available for clients"), HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION("hive.server2.logging.operation.log.location", "${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + File.separator + "operation_logs", "Top level directory where operation logs are stored if logging functionality is enabled"), - + HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false, + "When true, HS2 operation logs available for clients will be verbose"), // logging configuration HIVE_LOG4J_FILE("hive.log4j.file", "", "Hive log4j configuration file.\n" + @@ -1599,7 +1610,7 @@ public class HiveConf extends Configurat "table. From 0.12 onwards, they are displayed separately. This flag will let you\n" + "get old behavior, if desired. 
See, test-case in patch for HIVE-6689."), - HIVE_SSL_PROTOCOL_BLACKLIST("hive.ssl.protocol.blacklist", "SSLv2,SSLv2Hello,SSLv3", + HIVE_SSL_PROTOCOL_BLACKLIST("hive.ssl.protocol.blacklist", "SSLv2,SSLv3", "SSL Versions to disable for all Hive Servers"), // HiveServer2 specific configs Modified: hive/branches/spark/common/src/java/org/apache/hive/common/util/Decimal128FastBuffer.java URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hive/common/util/Decimal128FastBuffer.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/common/src/java/org/apache/hive/common/util/Decimal128FastBuffer.java (original) +++ hive/branches/spark/common/src/java/org/apache/hive/common/util/Decimal128FastBuffer.java Tue Nov 18 00:48:40 2014 @@ -1,6 +1,21 @@ /** - * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ + package org.apache.hive.common.util; import java.nio.ByteBuffer; Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java (original) +++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java Tue Nov 18 00:48:40 2014 @@ -811,7 +811,7 @@ public class TestDecimal128 { assertEquals("0.00923076923", d2.getHiveDecimalString()); Decimal128 d3 = new Decimal128("0.00923076000", (short) 15); - assertEquals("0.00923076000", d3.getHiveDecimalString()); + assertEquals("0.00923076", d3.getHiveDecimalString()); Decimal128 d4 = new Decimal128("4294967296.01", (short) 15); assertEquals("4294967296.01", d4.getHiveDecimalString()); @@ -849,37 +849,15 @@ public class TestDecimal128 { d11.update(hd6.bigDecimalValue()); assertEquals(hd6.toString(), d11.getHiveDecimalString()); - // The trailing zeros from a double value are trimmed automatically - // by the double data type Decimal128 d12 = new Decimal128(27.000, (short)3); - HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.0")); + HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000")); assertEquals(hd7.toString(), d12.getHiveDecimalString()); - assertEquals("27.0", d12.getHiveDecimalString()); + assertEquals("27", d12.getHiveDecimalString()); Decimal128 d13 = new Decimal128(1234123000, (short)3); HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000")); assertEquals(hd8.toString(), d13.getHiveDecimalString()); assertEquals("1234123000", d13.getHiveDecimalString()); - - Decimal128 d14 = new Decimal128(1.33e4, (short)10); - HiveDecimal hd9 = HiveDecimal.create(new BigDecimal("1.33e4")); - 
assertEquals(hd9.toString(), d14.getHiveDecimalString()); - assertEquals("13300", d14.getHiveDecimalString()); - - Decimal128 d15 = new Decimal128(1.33e-4, (short)10); - HiveDecimal hd10 = HiveDecimal.create(new BigDecimal("1.33e-4")); - assertEquals(hd10.toString(), d15.getHiveDecimalString()); - assertEquals("0.000133", d15.getHiveDecimalString()); - - Decimal128 d16 = new Decimal128("1.33e4", (short)10); - HiveDecimal hd11 = HiveDecimal.create(new BigDecimal("1.33e4")); - assertEquals(hd11.toString(), d16.getHiveDecimalString()); - assertEquals("13300", d16.getHiveDecimalString()); - - Decimal128 d17 = new Decimal128("1.33e-4", (short)10); - HiveDecimal hd12 = HiveDecimal.create(new BigDecimal("1.33e-4")); - assertEquals(hd12.toString(), d17.getHiveDecimalString()); - assertEquals("0.000133", d17.getHiveDecimalString()); } @Test
