Author: brock
Date: Sat Aug 30 06:44:46 2014
New Revision: 1621416

URL: http://svn.apache.org/r1621416
Log:
HIVE-7922 - Merge from trunk (2) [Spark Branch] (Merge commit)

Added:
    
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
      - copied unchanged from r1621415, 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
    
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
      - copied unchanged from r1621415, 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/TestHS2ImpersonationWithRemoteMS.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TimestampColumnStatistics.java
      - copied unchanged from r1621415, 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TimestampColumnStatistics.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerWrapper.java
      - copied unchanged from r1621415, 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerWrapper.java
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_create_tbl.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientnegative/authorization_create_tbl.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_create_view.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientnegative/authorization_create_view.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_ctas2.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas2.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_grant_group.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientnegative/authorization_grant_group.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_case.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientnegative/authorization_role_case.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge_incompat1.q
      - copied unchanged from r1620669, 
hive/trunk/ql/src/test/queries/clientpositive/orc_merge_incompat1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge_incompat2.q
      - copied unchanged from r1620669, 
hive/trunk/ql/src/test/queries/clientpositive/orc_merge_incompat2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_timestamp.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientpositive/orc_ppd_timestamp.q
    hive/branches/spark/ql/src/test/queries/clientpositive/partition_char.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientpositive/partition_char.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/temp_table_display_colstats_tbllvl.q
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/queries/clientpositive/temp_table_display_colstats_tbllvl.q
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_create_tbl.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientnegative/authorization_create_tbl.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_create_view.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientnegative/authorization_create_view.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_ctas2.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientnegative/authorization_ctas2.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_grant_group.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientnegative/authorization_grant_group.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_role_case.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientnegative/authorization_role_case.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
      - copied, changed from r1620669, 
hive/trunk/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
      - copied, changed from r1620669, 
hive/trunk/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/orc_ppd_timestamp.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientpositive/orc_ppd_timestamp.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/partition_char.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientpositive/partition_char.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
      - copied unchanged from r1621415, 
hive/trunk/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/FetchType.java
      - copied unchanged from r1621415, 
hive/trunk/service/src/java/org/apache/hive/service/cli/FetchType.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
      - copied unchanged from r1621415, 
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationLog.java
      - copied unchanged from r1621415, 
hive/trunk/service/src/java/org/apache/hive/service/cli/operation/OperationLog.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/server/ThreadFactoryWithGarbageCleanup.java
      - copied unchanged from r1621415, 
hive/trunk/service/src/java/org/apache/hive/service/server/ThreadFactoryWithGarbageCleanup.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
      - copied unchanged from r1621415, 
hive/trunk/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java
    hive/branches/spark/service/src/test/org/apache/hive/service/cli/operation/
      - copied from r1621415, 
hive/trunk/service/src/test/org/apache/hive/service/cli/operation/
Removed:
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorize_create_tbl.q
    hive/branches/spark/ql/src/test/queries/clientnegative/orc_merge1.q
    hive/branches/spark/ql/src/test/queries/clientnegative/orc_merge2.q
    hive/branches/spark/ql/src/test/queries/clientnegative/orc_merge3.q
    hive/branches/spark/ql/src/test/queries/clientnegative/orc_merge4.q
    hive/branches/spark/ql/src/test/queries/clientnegative/orc_merge5.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/temp_table_column_stats.q
    
hive/branches/spark/ql/src/test/results/clientnegative/authorize_create_tbl.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/temp_table_column_stats.q.out
Modified:
    hive/branches/spark/   (props changed)
    
hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
    
hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    
hive/branches/spark/contrib/src/test/results/clientnegative/serde_regex.q.out
    
hive/branches/spark/contrib/src/test/results/clientpositive/fileformat_base64.q.out
    
hive/branches/spark/contrib/src/test/results/clientpositive/serde_regex.q.out
    
hive/branches/spark/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
    
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    
hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
    
hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
    
hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
    
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
    
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties
    
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
    
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
    
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
    
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    
hive/branches/spark/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeMapper.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileMergeMapper.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/PredicateLeaf.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
    
hive/branches/spark/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
    
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_public_create.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_public_drop.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorize_grant_public.q
    
hive/branches/spark/ql/src/test/queries/clientnegative/authorize_revoke_public.q
    hive/branches/spark/ql/src/test/queries/clientpositive/add_part_exist.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_char1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_index.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/alter_partition_coltype.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_skewed_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_varchar1.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/alter_view_as_select.q
    hive/branches/spark/ql/src/test/queries/clientpositive/alter_view_rename.q
    hive/branches/spark/ql/src/test/queries/clientpositive/archive_multi.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_5.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_grant_public_role.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant2.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/create_or_replace_view.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/drop_multi_partitions.q
    hive/branches/spark/ql/src/test/queries/clientpositive/exchange_partition.q
    hive/branches/spark/ql/src/test/queries/clientpositive/index_auto_empty.q
    hive/branches/spark/ql/src/test/queries/clientpositive/input46.q
    hive/branches/spark/ql/src/test/queries/clientpositive/touch.q
    
hive/branches/spark/ql/src/test/queries/clientpositive/unset_table_view_property.q
    
hive/branches/spark/ql/src/test/resources/orc-file-dump-dictionary-threshold.out
    hive/branches/spark/ql/src/test/resources/orc-file-dump.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_as_select_not_exist.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure2.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure4.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure5.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure6.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure7.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure9.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/analyze_view.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_part.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_public_create.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_public_drop.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorization_select_view.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorize_grant_public.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view2.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view3.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view4.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view5.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view6.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view7.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_or_replace_view8.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_view_failure1.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_view_failure2.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/create_view_failure4.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/desc_failure3.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/drop_table_failure2.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/insert_view_failure.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/invalidate_view1.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/load_view_failure.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/recursive_view.q.out
    
hive/branches/spark/ql/src/test/results/clientnegative/unset_view_property.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/add_part_exist.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/allcolref_in_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_char1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_index.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_rename_table.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_skewed_table.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_table_update_status.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_varchar1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_view_as_select.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/alter_view_rename.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/archive_multi.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_8.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_cli_createtab.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_create_temp_table.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_grant_public_role.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_owner_actions.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_role_grant2.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/autogen_colalias.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/char_nested_types.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/colstats_all_nulls.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/create_big_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_default_prop.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_like_tbl_props.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_like_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_or_replace_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_union_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/create_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_view_partitioned.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/create_view_translate.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/cross_product_check_1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/cross_product_check_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ctas.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ctas_char.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ctas_colname.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ctas_date.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ctas_varchar.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cte_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/database.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/database_drop.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dbtxnmgr_ddl1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dbtxnmgr_query5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/decimal_serde.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/describe_formatted_view_partitioned_json.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/describe_syntax.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/drop_multi_partitions.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/exchange_partition.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/explain_dependency.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/explain_logical.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/fileformat_text.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/global_limit.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/groupby_duplicate_key.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/index_auto_empty.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input46.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/inputddl1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/inputddl2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/inputddl3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/insert1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join41.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/join_filters_overlap.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_outer.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/merge3.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/multi_join_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nestedvirtual.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nonmr_fetch.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nullformat.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/nullformatCTAS.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_analyze.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_createas1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/parallel_orderby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_ctas.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/partition_decode_name.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/partition_special_char.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/ppd_field_garbage.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/query_result_fileformat.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/rcfile_createas1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/rcfile_default_format.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_temp_table.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_noskew.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/stats_counter.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/str_to_map.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists_having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table_gb1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/temp_table_names.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/temp_table_precedence.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/temp_table_subquery1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/ctas.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/insert1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/stats_counter.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_dml.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_part_project.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/touch.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union24.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union25.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union27.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union31.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union32.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union_top_level.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/unset_table_view_property.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/updateAccessTime.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/varchar_nested_types.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vector_between_in.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/view_cast.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/view_inputs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out
    
hive/branches/spark/ql/src/test/results/clientpositive/windowing_streaming.q.out
    
hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java
    hive/branches/spark/service/if/TCLIService.thrift
    hive/branches/spark/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
    hive/branches/spark/service/src/gen/thrift/gen-cpp/TCLIService_types.h
    
hive/branches/spark/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
    hive/branches/spark/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
    hive/branches/spark/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/CLIService.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/ICLIService.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/Operation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
    
hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java

Propchange: hive/branches/spark/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1620103-1620669,1620671-1621415

Modified: 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
 (original)
+++ 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
 Sat Aug 30 06:44:46 2014
@@ -24,7 +24,6 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.AccessControlException;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
 
@@ -34,12 +33,10 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
@@ -628,4 +625,62 @@ public final class FileUtils {
     //Once equality has been added in HDFS-4321, we should make use of it
     return fs1.getUri().equals(fs2.getUri());
   }
+
+  /**
+   * Checks if delete can be performed on given path by given user.
+   * If file does not exist it just returns without throwing an Exception
+   * @param path
+   * @param conf
+   * @param user
+   * @throws AccessControlException
+   * @throws InterruptedException
+   * @throws Exception
+   */
+  public static void checkDeletePermission(Path path, Configuration conf, 
String user)
+      throws AccessControlException, InterruptedException, Exception {
+   // This requires ability to delete the given path.
+    // The following 2 conditions should be satisfied for this-
+    // 1. Write permissions on parent dir
+    // 2. If sticky bit is set on parent dir then one of following should be
+    // true
+    //   a. User is owner of the current dir/file
+    //   b. User is owner of the parent dir
+    //   Super users are also allowed to drop the file, but there is no good 
way of checking
+    //   if a user is a super user. Also super users running hive queries is 
not a common
+    //   use case. super users can also do a chown to be able to drop the file
+
+    final FileSystem fs = path.getFileSystem(conf);
+    if (!fs.exists(path)) {
+      // no file/dir to be deleted
+      return;
+    }
+    Path parPath = path.getParent();
+    // check user has write permissions on the parent dir
+    FileStatus stat = fs.getFileStatus(path);
+    FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);
+
+    // check if sticky bit is set on the parent dir
+    FileStatus parStatus = fs.getFileStatus(parPath);
+    if (!parStatus.getPermission().getStickyBit()) {
+      // no sticky bit, so write permission on parent dir is sufficient
+      // no further checks needed
+      return;
+    }
+
+    // check if user is owner of parent dir
+    if (parStatus.getOwner().equals(user)) {
+      return;
+    }
+
+    // check if user is owner of current dir/file
+    FileStatus childStatus = fs.getFileStatus(path);
+    if (childStatus.getOwner().equals(user)) {
+      return;
+    }
+    String msg = String.format("Permission Denied: User %s can't delete %s 
because sticky bit is"
+        + " set on the parent dir and user does not own this file or its 
parent", user, path);
+    throw new IOException(msg);
+
+  }
+
 }

Modified: 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ 
hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
Sat Aug 30 06:44:46 2014
@@ -54,7 +54,6 @@ import org.apache.hive.common.HiveCompat
  * Hive Configuration.
  */
 public class HiveConf extends Configuration {
-
   protected String hiveJar;
   protected Properties origProp;
   protected String auxJars;
@@ -1415,6 +1414,14 @@ public class HiveConf extends Configurat
         "to construct a list exception handlers to handle exceptions thrown\n" 
+
         "by record readers"),
 
+    // operation log configuration
+    
HIVE_SERVER2_LOGGING_OPERATION_ENABLED("hive.server2.logging.operation.enabled",
 true,
+        "When true, HS2 will save operation logs"),
+    
HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION("hive.server2.logging.operation.log.location",
+        "${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + 
File.separator +
+            "operation_logs",
+        "Top level directory where operation logs are stored if logging 
functionality is enabled"),
+
     // logging configuration
     HIVE_LOG4J_FILE("hive.log4j.file", "",
         "Hive log4j configuration file.\n" +
@@ -1486,8 +1493,11 @@ public class HiveConf extends Configurat
         "Minimum number of worker threads when in HTTP mode."),
     
HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads",
 500,
         "Maximum number of worker threads when in HTTP mode."),
-    
HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time",
 1800000, 
+    
HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time",
 1800000,
         "Maximum idle time in milliseconds for a connection on the server when 
in HTTP mode."),
+    
HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME("hive.server2.thrift.http.worker.keepalive.time",
 60,
+        "Keepalive time (in seconds) for an idle http worker thread. When 
number of workers > min workers, " +
+        "excess threads are killed after this time interval."),
 
     // binary transport settings
     HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000,
@@ -1510,7 +1520,9 @@ public class HiveConf extends Configurat
         "Minimum number of Thrift worker threads"),
     
HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads",
 500,
         "Maximum number of Thrift worker threads"),
-
+    
HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME("hive.server2.thrift.worker.keepalive.time",
 60,
+        "Keepalive time (in seconds) for an idle worker thread. When number of 
workers > min workers, " +
+        "excess threads are killed after this time interval."),
     // Configuration for async thread pool in SessionManager
     HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 100,
         "Number of threads in the async thread pool for HiveServer2"),

Modified: 
hive/branches/spark/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/contrib/src/test/results/clientnegative/serde_regex.q.out 
(original)
+++ 
hive/branches/spark/contrib/src/test/results/clientnegative/serde_regex.q.out 
Sat Aug 30 06:44:46 2014
@@ -56,7 +56,7 @@ STAGE PLANS:
           serde properties:
             input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ 
"]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
             output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
-          name: serde_regex
+          name: default.serde_regex
 
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,

Modified: 
hive/branches/spark/contrib/src/test/results/clientpositive/fileformat_base64.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/test/results/clientpositive/fileformat_base64.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/contrib/src/test/results/clientpositive/fileformat_base64.q.out
 (original)
+++ 
hive/branches/spark/contrib/src/test/results/clientpositive/fileformat_base64.q.out
 Sat Aug 30 06:44:46 2014
@@ -22,7 +22,7 @@ STAGE PLANS:
           columns: key int, value string
           input format: 
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat
           output format: 
org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat
-          name: base64_test
+          name: default.base64_test
 
 PREHOOK: query: CREATE TABLE base64_test(key INT, value STRING) STORED AS
   INPUTFORMAT 
'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'

Modified: 
hive/branches/spark/contrib/src/test/results/clientpositive/serde_regex.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/contrib/src/test/results/clientpositive/serde_regex.q.out 
(original)
+++ 
hive/branches/spark/contrib/src/test/results/clientpositive/serde_regex.q.out 
Sat Aug 30 06:44:46 2014
@@ -48,7 +48,7 @@ STAGE PLANS:
           serde properties:
             input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ 
"]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
             output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
-          name: serde_regex
+          name: default.serde_regex
 
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,

Modified: 
hive/branches/spark/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
 (original)
+++ 
hive/branches/spark/hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
 Sat Aug 30 06:44:46 2014
@@ -20,7 +20,7 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:hbasedb
-PREHOOK: Output: [email protected]_table_0
+PREHOOK: Output: hbaseDB@hbase_table_0
 POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
 -- Hadoop 0.23 changes the behavior FsShell on Exit Codes
 -- In Hadoop 0.20
@@ -37,7 +37,6 @@ WITH SERDEPROPERTIES ("hbase.columns.map
 TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:hbasedb
-POSTHOOK: Output: [email protected]_table_0
 POSTHOOK: Output: hbaseDB@hbase_table_0
 Found 3 items
 #### A masked pattern was here ####

Modified: 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
 (original)
+++ 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
 Sat Aug 30 06:44:46 2014
@@ -86,7 +86,7 @@ final class CreateTableHook extends HCat
           "Operation not supported. Create table as " +
             "Select is not a valid operation.");
 
-      case HiveParser.TOK_TABLEBUCKETS:
+      case HiveParser.TOK_ALTERTABLE_BUCKETS:
         break;
 
       case HiveParser.TOK_LIKETABLE:

Modified: 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
 (original)
+++ 
hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
 Sat Aug 30 06:44:46 2014
@@ -71,7 +71,7 @@ public class HCatSemanticAnalyzer extend
       hook = new CreateDatabaseHook();
       return hook.preAnalyze(context, ast);
 
-    case HiveParser.TOK_ALTERTABLE_PARTITION:
+    case HiveParser.TOK_ALTERTABLE:
       if (((ASTNode) ast.getChild(1)).getToken().getType() == 
HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
         return ast;
       } else if (((ASTNode) ast.getChild(1)).getToken().getType() == 
HiveParser.TOK_ALTERTABLE_MERGEFILES) {
@@ -163,7 +163,6 @@ public class HCatSemanticAnalyzer extend
 
       case HiveParser.TOK_CREATETABLE:
       case HiveParser.TOK_CREATEDATABASE:
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
 
         // HCat will allow these operations to be performed.
         // Database DDL
@@ -178,12 +177,20 @@ public class HCatSemanticAnalyzer extend
       case HiveParser.TOK_CREATEINDEX:
       case HiveParser.TOK_DROPINDEX:
       case HiveParser.TOK_SHOWINDEXES:
+        break;
 
         // View DDL
         //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_RENAME:
+      case HiveParser.TOK_ALTERVIEW:
+        switch (ast.getChild(1).getType()) {
+          case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+          case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+          case HiveParser.TOK_ALTERVIEW_RENAME:
+          case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+          case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES:
+        }
+        break;
+
       case HiveParser.TOK_CREATEVIEW:
       case HiveParser.TOK_DROPVIEW:
 
@@ -205,20 +212,39 @@ public class HCatSemanticAnalyzer extend
       case HiveParser.TOK_DESCFUNCTION:
       case HiveParser.TOK_SHOWFUNCTIONS:
       case HiveParser.TOK_EXPLAIN:
+        break;
 
         // Table DDL
-      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
-      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
-      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_TOUCH:
+      case HiveParser.TOK_ALTERTABLE:
+        switch (ast.getChild(1).getType()) {
+          case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+          case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+          case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+          case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
+          case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+          case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_RENAME:
+          case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+          case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+          case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+          case HiveParser.TOK_ALTERTABLE_TOUCH:
+          case HiveParser.TOK_ALTERTABLE_ARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
+          case HiveParser.TOK_ALTERTABLE_SKEWED:
+          case HiveParser.TOK_ALTERTABLE_FILEFORMAT:
+          case HiveParser.TOK_ALTERTABLE_PROTECTMODE:
+          case HiveParser.TOK_ALTERTABLE_LOCATION:
+          case HiveParser.TOK_ALTERTABLE_MERGEFILES:
+          case HiveParser.TOK_ALTERTABLE_RENAMEPART:
+          case HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION:
+          case HiveParser.TOK_ALTERTABLE_BUCKETS:
+          case HiveParser.TOK_ALTERTABLE_COMPACT:
+        }
+        break;
+
       case HiveParser.TOK_DESCTABLE:
       case HiveParser.TOK_DROPTABLE:
       case HiveParser.TOK_SHOW_TABLESTATUS:

Modified: 
hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
 (original)
+++ 
hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
 Sat Aug 30 06:44:46 2014
@@ -156,7 +156,7 @@ public class TestSemanticAnalysis extend
   public void testCreateTableIfNotExists() throws MetaException, TException, 
NoSuchObjectException, CommandNeedRetryException {
 
     hcatDriver.run("drop table " + TBL_NAME);
-    hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
+    hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
     Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, 
TBL_NAME);
     List<FieldSchema> cols = tbl.getSd().getCols();
     assertEquals(1, cols.size());

Modified: 
hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
 (original)
+++ 
hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
 Sat Aug 30 06:44:46 2014
@@ -51,6 +51,7 @@ public class HCatPartition {
   private int createTime;
   private int lastAccessTime;
   private StorageDescriptor sd;
+  private List<HCatFieldSchema> columns; // Cache column-list from this.sd.
   private Map<String, String> parameters;
 
   // For use from within HCatClient.getPartitions().
@@ -68,6 +69,7 @@ public class HCatPartition {
     }
 
     this.sd = partition.getSd();
+    this.columns = getColumns(this.sd);
   }
 
   // For constructing HCatPartitions afresh, as an argument to 
HCatClient.addPartitions().
@@ -77,6 +79,7 @@ public class HCatPartition {
     this.dbName = hcatTable.getDbName();
     this.sd = new StorageDescriptor(hcatTable.getSd());
     this.sd.setLocation(location);
+    this.columns = getColumns(this.sd);
     this.createTime = (int)(System.currentTimeMillis()/1000);
     this.lastAccessTime = -1;
     this.values = new ArrayList<String>(hcatTable.getPartCols().size());
@@ -98,7 +101,7 @@ public class HCatPartition {
     this.dbName = rhs.dbName;
     this.sd = new StorageDescriptor(rhs.sd);
     this.sd.setLocation(location);
-
+    this.columns = getColumns(this.sd);
     this.createTime = (int) (System.currentTimeMillis() / 1000);
     this.lastAccessTime = -1;
     this.values = new ArrayList<String>(hcatTable.getPartCols().size());
@@ -112,6 +115,14 @@ public class HCatPartition {
     }
   }
 
+  private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws 
HCatException {
+    ArrayList<HCatFieldSchema> columns = new 
ArrayList<HCatFieldSchema>(sd.getColsSize());
+    for (FieldSchema fieldSchema : sd.getCols()) {
+      columns.add(HCatSchemaUtils.getHCatFieldSchema(fieldSchema));
+    }
+    return columns;
+  }
+
   // For use from HCatClient.addPartitions(), to construct from user-input.
   Partition toHivePartition() throws HCatException {
     Partition hivePtn = new Partition();
@@ -172,11 +183,7 @@ public class HCatPartition {
    *
    * @return the columns
    */
-  public List<HCatFieldSchema> getColumns() throws HCatException {
-    ArrayList<HCatFieldSchema> columns = new 
ArrayList<HCatFieldSchema>(sd.getColsSize());
-    for (FieldSchema fieldSchema : sd.getCols()) {
-      columns.add(HCatSchemaUtils.getHCatFieldSchema(fieldSchema));
-    }
+  public List<HCatFieldSchema> getColumns() {
     return columns;
   }
 

Modified: 
hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
 (original)
+++ 
hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
 Sat Aug 30 06:44:46 2014
@@ -28,6 +28,7 @@ import java.util.concurrent.atomic.Atomi
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
@@ -49,6 +50,7 @@ public class MiniHS2 extends AbstractHiv
   public static final String HS2_BINARY_MODE = "binary";
   public static final String HS2_HTTP_MODE = "http";
   private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+  private static final FsPermission FULL_PERM = new FsPermission((short)00777);
   private HiveServer2 hiveServer2 = null;
   private final File baseDir;
   private final Path baseDfsDir;
@@ -59,6 +61,7 @@ public class MiniHS2 extends AbstractHiv
   private boolean useMiniKdc = false;
   private final String serverPrincipal;
   private final String serverKeytab;
+  private final boolean isMetastoreRemote;
 
   public static class Builder {
     private HiveConf hiveConf = new HiveConf();
@@ -67,6 +70,7 @@ public class MiniHS2 extends AbstractHiv
     private String serverPrincipal;
     private String serverKeytab;
     private boolean isHTTPTransMode = false;
+    private boolean isMetastoreRemote;
 
     public Builder() {
     }
@@ -83,6 +87,11 @@ public class MiniHS2 extends AbstractHiv
       return this;
     }
 
+    public Builder withRemoteMetastore() {
+      this.isMetastoreRemote = true;
+      return this;
+    }
+
     public Builder withConf(HiveConf hiveConf) {
       this.hiveConf = hiveConf;
       return this;
@@ -107,7 +116,8 @@ public class MiniHS2 extends AbstractHiv
       } else {
         hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
       }
-      return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, 
serverKeytab);
+      return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, 
serverKeytab,
+          isMetastoreRemote);
     }
   }
 
@@ -139,12 +149,14 @@ public class MiniHS2 extends AbstractHiv
     return useMiniKdc;
   }
 
-  private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc, 
String serverPrincipal, String serverKeytab) throws Exception {
+  private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc,
+      String serverPrincipal, String serverKeytab, boolean isMetastoreRemote) 
throws Exception {
     super(hiveConf, "localhost", MetaStoreUtils.findFreePort(), 
MetaStoreUtils.findFreePort());
     this.useMiniMR = useMiniMR;
     this.useMiniKdc = useMiniKdc;
     this.serverPrincipal = serverPrincipal;
     this.serverKeytab = serverKeytab;
+    this.isMetastoreRemote = isMetastoreRemote;
     baseDir =  Files.createTempDir();
     FileSystem fs;
     if (useMiniMR) {
@@ -169,6 +181,9 @@ public class MiniHS2 extends AbstractHiv
 
     fs.mkdirs(baseDfsDir);
     Path wareHouseDir = new Path(baseDfsDir, "warehouse");
+    // Create warehouse with 777, so that user impersonation has no issues.
+    FileSystem.mkdirs(fs, wareHouseDir, FULL_PERM);
+
     fs.mkdirs(wareHouseDir);
     setWareHouseDir(wareHouseDir.toString());
     System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, 
metaStoreURL);
@@ -180,10 +195,15 @@ public class MiniHS2 extends AbstractHiv
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
 
     Path scratchDir = new Path(baseDfsDir, "scratch");
-    fs.mkdirs(scratchDir);
+
+    // Create scratchdir with 777, so that user impersonation has no issues.
+    FileSystem.mkdirs(fs, scratchDir, FULL_PERM);
     System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, 
scratchDir.toString());
-    System.setProperty(HiveConf.ConfVars.LOCALSCRATCHDIR.varname,
-        baseDir.getPath() + File.separator + "scratch");
+    hiveConf.setVar(ConfVars.SCRATCHDIR, scratchDir.toString());
+
+    String localScratchDir = baseDir.getPath() + File.separator + "scratch";
+    System.setProperty(HiveConf.ConfVars.LOCALSCRATCHDIR.varname, 
localScratchDir);
+    hiveConf.setVar(ConfVars.LOCALSCRATCHDIR, localScratchDir);
   }
 
   public MiniHS2(HiveConf hiveConf) throws Exception {
@@ -191,10 +211,17 @@ public class MiniHS2 extends AbstractHiv
   }
 
   public MiniHS2(HiveConf hiveConf, boolean useMiniMR) throws Exception {
-    this(hiveConf, useMiniMR, false, null, null);
+    this(hiveConf, useMiniMR, false, null, null, false);
   }
 
   public void start(Map<String, String> confOverlay) throws Exception {
+    if (isMetastoreRemote) {
+      int metaStorePort = MetaStoreUtils.findFreePort();
+      getHiveConf().setVar(ConfVars.METASTOREURIS, "thrift://localhost:" + 
metaStorePort);
+      MetaStoreUtils.startMetaStore(metaStorePort,
+      ShimLoader.getHadoopThriftAuthBridge(), getHiveConf());
+    }
+
     hiveServer2 = new HiveServer2();
     // Set confOverlay parameters
     for (Map.Entry<String, String> entry : confOverlay.entrySet()) {
@@ -208,6 +235,9 @@ public class MiniHS2 extends AbstractHiv
 
   public void stop() {
     verifyStarted();
+    // Currently there is no way to stop the MetaStore service. It will be 
stopped when the
+    // test JVM exits. This is how other tests are also using MetaStore server.
+
     hiveServer2.stop();
     setStarted(false);
     try {

Modified: 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
 (original)
+++ 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
 Sat Aug 30 06:44:46 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.security;
 
 import java.net.URI;
-import java.security.AccessControlException;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -48,7 +47,7 @@ public class TestStorageBasedMetastoreAu
   @Override
   protected void allowCreateInDb(String dbName, String userName, String 
location)
       throws Exception {
-    setPermissions(location,"-rwxr--r--");
+    setPermissions(location,"-rwxr--r-t");
   }
 
   @Override
@@ -79,7 +78,7 @@ public class TestStorageBasedMetastoreAu
   @Override
   protected void allowDropOnDb(String dbName, String userName, String location)
       throws Exception {
-    setPermissions(location,"-rwxr--r--");
+    setPermissions(location,"-rwxr--r-t");
   }
 
   protected void setPermissions(String locn, String permissions) throws 
Exception {

Modified: 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
 (original)
+++ 
hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
 Sat Aug 30 06:44:46 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.securi
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.reset;
 import static org.mockito.Mockito.verify;
@@ -141,6 +142,47 @@ public class TestHiveAuthorizerCheckInvo
   }
 
   @Test
+  public void testCreateTableWithDb() throws HiveAuthzPluginException, 
HiveAccessControlException,
+      CommandNeedRetryException {
+    final String newTable = "ctTableWithDb";
+    checkCreateViewOrTableWithDb(newTable, "create table " + dbName + "." + 
newTable + "(i int)");
+  }
+
+  @Test
+  public void testCreateViewWithDb() throws HiveAuthzPluginException, HiveAccessControlException,
+      CommandNeedRetryException {
+    final String newTable = "ctViewWithDb";
+    checkCreateViewOrTableWithDb(newTable, "create view " + dbName + "." + newTable + " as select 1 as i");
+  }
+
+  private void checkCreateViewOrTableWithDb(String newTable, String cmd)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    reset(mockedAuthorizer);
+    int status = driver.compile(cmd);
+    assertEquals(0, status);
+
+    List<HivePrivilegeObject> outputs = 
getHivePrivilegeObjectInputs().getRight();
+    assertEquals("num outputs", 2, outputs.size());
+    for (HivePrivilegeObject output : outputs) {
+      switch (output.getType()) {
+      case DATABASE:
+        assertTrue("database name", 
output.getDbname().equalsIgnoreCase(dbName));
+        break;
+      case TABLE_OR_VIEW:
+        assertTrue("database name", 
output.getDbname().equalsIgnoreCase(dbName));
+        assertEqualsIgnoreCase("table name", output.getObjectName(), newTable);
+        break;
+      default:
+        fail("Unexpected type : " + output.getType());
+      }
+    }
+  }
+
+  private void assertEqualsIgnoreCase(String msg, String expected, String 
actual) {
+    assertEquals(msg, expected.toLowerCase(), actual.toLowerCase());
+  }
+
+  @Test
   public void testInputNoColumnsUsed() throws HiveAuthzPluginException, 
HiveAccessControlException,
       CommandNeedRetryException {
 

Modified: 
hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties 
(original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties 
Sat Aug 30 06:44:46 2014
@@ -86,6 +86,7 @@ minitez.query.files.shared=alter_merge_2
   mapreduce2.q,\
   merge1.q,\
   merge2.q,\
+  metadataonly1.q,\
   metadata_only_queries.q,\
   optimize_nullscan.q,\
   orc_analyze.q,\

Modified: 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
 (original)
+++ 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
 Sat Aug 30 06:44:46 2014
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.securit
  * To be used for testing purposes only!
  */
 @Private
-public class SQLStdHiveAccessControllerForTest extends 
SQLStdHiveAccessController {
+public class SQLStdHiveAccessControllerForTest extends 
SQLStdHiveAccessControllerWrapper {
 
   SQLStdHiveAccessControllerForTest(HiveMetastoreClientFactory 
metastoreClientFactory, HiveConf conf,
       HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) 
throws HiveAuthzPluginException {

Modified: 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
 (original)
+++ 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
 Sat Aug 30 06:44:46 2014
@@ -38,7 +38,7 @@ public class SQLStdHiveAuthorizationVali
 
   public SQLStdHiveAuthorizationValidatorForTest(HiveMetastoreClientFactory 
metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator,
-      SQLStdHiveAccessController privController) {
+      SQLStdHiveAccessControllerWrapper privController) {
     super(metastoreClientFactory, conf, authenticator, privController);
   }
 

Modified: 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
 (original)
+++ 
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
 Sat Aug 30 06:44:46 2014
@@ -32,7 +32,7 @@ public class SQLStdHiveAuthorizerFactory
   @Override
   public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory 
metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator, 
HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
-    SQLStdHiveAccessController privilegeManager =
+    SQLStdHiveAccessControllerWrapper privilegeManager =
         new SQLStdHiveAccessControllerForTest(metastoreClientFactory, conf, 
authenticator, ctx);
     return new HiveAuthorizerImpl(
         privilegeManager,

Modified: 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Sat Aug 30 06:44:46 2014
@@ -250,7 +250,7 @@ public class HiveMetaStore extends Thrif
     private static String currentUrl;
 
     private Warehouse wh; // hdfs warehouse
-    private final ThreadLocal<RawStore> threadLocalMS =
+    private static final ThreadLocal<RawStore> threadLocalMS =
         new ThreadLocal<RawStore>() {
           @Override
           protected synchronized RawStore initialValue() {
@@ -265,6 +265,14 @@ public class HiveMetaStore extends Thrif
       }
     };
 
+    public static RawStore getRawStore() {
+      return threadLocalMS.get();
+    }
+
+    public static void removeRawStore() {
+      threadLocalMS.remove();
+    }
+
     // Thread local configuration is needed as many threads could make changes
     // to the conf using the connection hook
     private final ThreadLocal<Configuration> threadLocalConf =
@@ -384,6 +392,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public void init() throws MetaException {
       rawStoreClassName = 
hiveConf.getVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL);
       initListeners = MetaStoreUtils.getMetaStoreListeners(

Modified: 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
 (original)
+++ 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
 Sat Aug 30 06:44:46 2014
@@ -45,9 +45,11 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hive.common.HiveStatsUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -163,19 +165,25 @@ public class MetaStoreUtils {
     return updateUnpartitionedTableStatsFast(db, tbl, wh, madeDir, false);
   }
 
+  public static boolean updateUnpartitionedTableStatsFast(Database db, Table 
tbl, Warehouse wh,
+      boolean madeDir, boolean forceRecompute) throws MetaException {
+    return updateUnpartitionedTableStatsFast(tbl,
+        wh.getFileStatusesForUnpartitionedTable(db, tbl), madeDir, 
forceRecompute);
+  }
+
   /**
    * Updates the numFiles and totalSize parameters for the passed 
unpartitioned Table by querying
    * the warehouse if the passed Table does not already have values for these 
parameters.
-   * @param db
    * @param tbl
-   * @param wh
+   * @param fileStatus
    * @param newDir if true, the directory was just created and can be assumed 
to be empty
    * @param forceRecompute Recompute stats even if the passed Table already has
    * these parameters set
    * @return true if the stats were updated, false otherwise
    */
-  public static boolean updateUnpartitionedTableStatsFast(Database db, Table 
tbl, Warehouse wh,
-      boolean newDir, boolean forceRecompute) throws MetaException {
+  public static boolean updateUnpartitionedTableStatsFast(Table tbl,
+      FileStatus[] fileStatus, boolean newDir, boolean forceRecompute) throws 
MetaException {
+
     Map<String,String> params = tbl.getParameters();
     boolean updated = false;
     if (forceRecompute ||
@@ -188,7 +196,6 @@ public class MetaStoreUtils {
         // The table location already exists and may contain data.
         // Let's try to populate those stats that don't require full scan.
         LOG.info("Updating table stats fast for " + tbl.getTableName());
-        FileStatus[] fileStatus = wh.getFileStatusesForUnpartitionedTable(db, 
tbl);
         populateQuickStats(fileStatus, params);
         LOG.info("Updated size of table " + tbl.getTableName() +" to "+ 
params.get(StatsSetupConst.TOTAL_SIZE));
         
if(!params.containsKey(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK)) {
@@ -1043,11 +1050,17 @@ public class MetaStoreUtils {
 
   public static void startMetaStore(final int port,
       final HadoopThriftAuthBridge bridge) throws Exception {
+    startMetaStore(port, bridge, new HiveConf(HMSHandler.class));
+  }
+
+  public static void startMetaStore(final int port,
+      final HadoopThriftAuthBridge bridge, final HiveConf hiveConf)
+      throws Exception {
     Thread thread = new Thread(new Runnable() {
       @Override
       public void run() {
         try {
-          HiveMetaStore.startMetaStore(port, bridge);
+          HiveMetaStore.startMetaStore(port, bridge, hiveConf);
         } catch (Throwable e) {
           LOG.error("Metastore Thrift Server threw an exception...",e);
         }
@@ -1057,6 +1070,7 @@ public class MetaStoreUtils {
     thread.start();
     loopUntilHMSReady(port);
   }
+
   /**
    * A simple connect test to make sure that the metastore is up
    * @throws Exception

Modified: 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
 (original)
+++ 
hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
 Sat Aug 30 06:44:46 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.metastore
 
 import static org.apache.commons.lang.StringUtils.join;
 
+import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -129,6 +130,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
 import org.apache.hadoop.hive.metastore.parser.FilterLexer;
 import org.apache.hadoop.hive.metastore.parser.FilterParser;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.thrift.TException;
 import org.datanucleus.store.rdbms.exceptions.MissingTableException;
@@ -252,6 +254,8 @@ public class ObjectStore implements RawS
       expressionProxy = createExpressionProxy(hiveConf);
       directSql = new MetaStoreDirectSql(pm);
     }
+    LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm +
+        " created in the thread with id: " + Thread.currentThread().getId());
   }
 
   /**
@@ -295,6 +299,16 @@ public class ObjectStore implements RawS
         }
       }
     }
+    // Password may no longer be in the conf, use getPassword()
+    try {
+      String passwd =
+          ShimLoader.getHadoopShims().getPassword(conf, 
HiveConf.ConfVars.METASTOREPWD.varname);
+      if (passwd != null && !passwd.isEmpty()) {
+        prop.setProperty(HiveConf.ConfVars.METASTOREPWD.varname, passwd);
+      }
+    } catch (IOException err) {
+      throw new RuntimeException("Error getting metastore password: " + 
err.getMessage(), err);
+    }
 
     if (LOG.isDebugEnabled()) {
       for (Entry<Object, Object> e : prop.entrySet()) {
@@ -343,6 +357,8 @@ public class ObjectStore implements RawS
   @Override
   public void shutdown() {
     if (pm != null) {
+      LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm +
+          " will be shutdown");
       pm.close();
     }
   }


Reply via email to