[4/4] incubator-hawq git commit: HAWQ-1628. Add hdfs protocol for pluggable storage framework

2018-08-14 Thread huor
HAWQ-1628. Add hdfs protocol for pluggable storage framework


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/48ff52c9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/48ff52c9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/48ff52c9

Branch: refs/heads/master
Commit: 48ff52c9a831f46e1a5513aac9d3de9f625ce329
Parents: 9f33d8d
Author: oushu1wangziming1 
Authored: Tue Jul 17 15:43:36 2018 +0800
Committer: Ruilong Huo 
Committed: Wed Aug 15 10:32:56 2018 +0800

--
 contrib/Makefile|1 +
 contrib/exthdfs/Makefile|   34 +
 contrib/exthdfs/common.h|   38 +
 contrib/exthdfs/exthdfs.c   |  469 
 contrib/extprotocol/gpextprotocol.c |2 +-
 src/backend/access/external/fileam.c|  740 
 src/backend/access/external/plugstorage.c   |   30 +-
 src/backend/catalog/cdb_external_extensions.sql |   12 +
 src/backend/catalog/heap.c  |   79 +-
 src/backend/cdb/cdbdatalocality.c   |  434 ++-
 src/backend/cdb/cdbpartition.c  |   24 +-
 src/backend/commands/analyze.c  |  417 ++-
 src/backend/commands/copy.c |   22 +-
 src/backend/commands/indexcmds.c|1 +
 src/backend/commands/sequence.c |   22 +-
 src/backend/commands/tablecmds.c|  870 +++---
 src/backend/commands/typecmds.c |   19 +-
 src/backend/commands/user.c |   35 +-
 src/backend/commands/view.c |   19 +-
 src/backend/executor/execDML.c  |   20 +-
 src/backend/nodes/copyfuncs.c   |   40 +-
 src/backend/nodes/equalfuncs.c  |   39 +-
 src/backend/nodes/outfast.c |   40 +-
 src/backend/nodes/outfuncs.c|   43 +-
 src/backend/nodes/readfast.c|   60 +-
 src/backend/nodes/readfuncs.c   |   44 +-
 src/backend/optimizer/plan/createplan.c |   44 +-
 src/backend/optimizer/plan/planner.c|   10 +-
 src/backend/parser/analyze.c| 1098 +-
 src/backend/parser/gram.y   |  178 ++-
 src/backend/tcop/utility.c  |   48 +-
 src/backend/utils/misc/uriparser.c  |   10 +-
 src/include/access/fileam.h |   13 +-
 src/include/access/formatter.h  |   10 +
 src/include/access/plugstorage.h|6 +-
 src/include/catalog/pg_exttable.h   |7 +-
 src/include/cdb/cdbdatalocality.h   |   11 +-
 src/include/commands/tablecmds.h|4 +-
 src/include/nodes/parsenodes.h  |  118 +-
 src/include/parser/analyze.h|   50 +-
 src/include/utils/uri.h |5 +-
 41 files changed, 4016 insertions(+), 1150 deletions(-)
--
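For orientation, the new contrib/exthdfs module listed above plugs into HAWQ's pluggable external protocol hooks. The following is a rough, hypothetical skeleton of a protocol read handler in that style; the function name, the omission of any real HDFS I/O, and the assumption that HAWQ keeps the same EXTPROTOCOL_* accessor macros used by contrib/extprotocol/gpextprotocol.c are all illustrative, not code from this commit.

#include "postgres.h"
#include "fmgr.h"
#include "access/extprotocol.h"

PG_MODULE_MAGIC;

PG_FUNCTION_INFO_V1(hdfsprotocol_import);
Datum hdfsprotocol_import(PG_FUNCTION_ARGS);

/*
 * Hypothetical read handler: called repeatedly by the external scan; each
 * call fills the framework-supplied buffer and returns the number of bytes
 * produced, or 0 when the data ends or on the final cleanup call.
 */
Datum
hdfsprotocol_import(PG_FUNCTION_ARGS)
{
	char   *url     = EXTPROTOCOL_GET_URL(fcinfo);      /* e.g. an hdfs:// location */
	char   *databuf = EXTPROTOCOL_GET_DATABUF(fcinfo);  /* buffer to fill */
	int     maxlen  = EXTPROTOCOL_GET_DATALEN(fcinfo);  /* buffer capacity */
	int     nread   = 0;

	/* final call: release any per-scan state and report end of data */
	if (EXTPROTOCOL_IS_LAST_CALL(fcinfo))
		PG_RETURN_INT32(0);

	/* a real handler would read from 'url' (e.g. via libhdfs3) into databuf */
	(void) url;
	(void) databuf;
	(void) maxlen;

	PG_RETURN_INT32(nread);
}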


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/contrib/Makefile
--
diff --git a/contrib/Makefile b/contrib/Makefile
index 695e92a..e5daff9 100644
--- a/contrib/Makefile
+++ b/contrib/Makefile
@@ -9,6 +9,7 @@ WANTED_DIRS = \
extprotocol \
gp_cancel_query \
formatter_fixedwidth \
+   exthdfs\
hawq-hadoop
 
 ifeq ($(with_pgcrypto), yes)

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/contrib/exthdfs/Makefile
--
diff --git a/contrib/exthdfs/Makefile b/contrib/exthdfs/Makefile
new file mode 100644
index 000..e247664
--- /dev/null
+++ b/contrib/exthdfs/Makefile
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+MODULE_big = exthdfs
+OBJS   = exthdfs.o
+
+PG_CPPFLAGS = -I$(libpq_srcdir)
+PG_LIBS = $(libpq_pgport)

[2/4] incubator-hawq git commit: HAWQ-1628. Add hdfs protocol for pluggable storage framework

2018-08-14 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/backend/nodes/outfuncs.c
--
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
index cf6bf04..5894184 100644
--- a/src/backend/nodes/outfuncs.c
+++ b/src/backend/nodes/outfuncs.c
@@ -2111,15 +2111,18 @@ _outCreateStmt(StringInfo str, CreateStmt *node)
 {
WRITE_NODE_TYPE("CREATESTMT");
 
-   WRITE_NODE_FIELD(relation);
-   WRITE_NODE_FIELD(tableElts);
-   WRITE_NODE_FIELD(inhRelations);
-   WRITE_NODE_FIELD(constraints);
-   WRITE_NODE_FIELD(options);
-   WRITE_ENUM_FIELD(oncommit, OnCommitAction);
-   WRITE_STRING_FIELD(tablespacename);
-   WRITE_NODE_FIELD(distributedBy);
-   WRITE_NODE_FIELD(partitionBy);
+   WRITE_CHAR_FIELD(base.relKind);
+   WRITE_NODE_FIELD(base.relation);
+   WRITE_NODE_FIELD(base.tableElts);
+   WRITE_NODE_FIELD(base.inhRelations);
+   WRITE_NODE_FIELD(base.constraints);
+   WRITE_NODE_FIELD(base.options);
+   WRITE_ENUM_FIELD(base.oncommit, OnCommitAction);
+   WRITE_STRING_FIELD(base.tablespacename);
+   WRITE_NODE_FIELD(base.distributedBy);
+   WRITE_BOOL_FIELD(base.is_part_child);
+   WRITE_BOOL_FIELD(base.is_add_part);
+   WRITE_NODE_FIELD(base.partitionBy);
WRITE_OID_FIELD(oidInfo.relOid);
WRITE_OID_FIELD(oidInfo.comptypeOid);
WRITE_OID_FIELD(oidInfo.toastOid);
@@ -2131,13 +2134,10 @@ _outCreateStmt(StringInfo str, CreateStmt *node)
WRITE_OID_FIELD(oidInfo.aoblkdirOid);
WRITE_OID_FIELD(oidInfo.aoblkdirIndexOid);
WRITE_OID_FIELD(oidInfo.aoblkdirComptypeOid);
-   WRITE_CHAR_FIELD(relKind);
WRITE_CHAR_FIELD(relStorage);
/* policy omitted */
/* postCreate omitted */
WRITE_NODE_FIELD(deferredStmts);
-   WRITE_BOOL_FIELD(is_part_child);
-   WRITE_BOOL_FIELD(is_add_part);
WRITE_BOOL_FIELD(is_split_part);
WRITE_OID_FIELD(ownerid);
WRITE_BOOL_FIELD(buildAoBlkdir);
@@ -2170,16 +2170,27 @@ _outCreateExternalStmt(StringInfo str, 
CreateExternalStmt *node)
 {
WRITE_NODE_TYPE("CREATEEXTERNALSTMT");
 
-   WRITE_NODE_FIELD(relation);
-   WRITE_NODE_FIELD(tableElts);
+   WRITE_CHAR_FIELD(base.relKind);
+   WRITE_NODE_FIELD(base.relation);
+   WRITE_NODE_FIELD(base.tableElts);
+   WRITE_NODE_FIELD(base.inhRelations);
+   WRITE_NODE_FIELD(base.constraints);
+   WRITE_NODE_FIELD(base.options);
+   WRITE_ENUM_FIELD(base.oncommit, OnCommitAction);
+   WRITE_STRING_FIELD(base.tablespacename);
+   WRITE_NODE_FIELD(base.distributedBy);
+   WRITE_BOOL_FIELD(base.is_part_child);
+   WRITE_BOOL_FIELD(base.is_add_part);
+   WRITE_NODE_FIELD(base.partitionBy);
WRITE_NODE_FIELD(exttypedesc);
WRITE_STRING_FIELD(format);
-   WRITE_NODE_FIELD(formatOpts);
WRITE_BOOL_FIELD(isweb);
WRITE_BOOL_FIELD(iswritable);
+   WRITE_BOOL_FIELD(isexternal);
+   WRITE_BOOL_FIELD(forceCreateDir);
+   WRITE_STRING_FIELD(parentPath);
WRITE_NODE_FIELD(sreh);
WRITE_NODE_FIELD(encoding);
-   WRITE_NODE_FIELD(distributedBy);
 }
 
 static void

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/backend/nodes/readfast.c
--
diff --git a/src/backend/nodes/readfast.c b/src/backend/nodes/readfast.c
index f9aee80..2cc7035 100644
--- a/src/backend/nodes/readfast.c
+++ b/src/backend/nodes/readfast.c
@@ -2033,14 +2033,17 @@ _readCreateStmt(const char ** str)
 {
READ_LOCALS(CreateStmt);
 
-   READ_NODE_FIELD(relation);
-   READ_NODE_FIELD(tableElts);
-   READ_NODE_FIELD(inhRelations);
-   READ_NODE_FIELD(constraints);
-   READ_NODE_FIELD(options);
-   READ_ENUM_FIELD(oncommit,OnCommitAction);
-   READ_STRING_FIELD(tablespacename);
-   READ_NODE_FIELD(distributedBy);
+   READ_CHAR_FIELD(base.relKind);
+   READ_NODE_FIELD(base.relation);
+   READ_NODE_FIELD(base.tableElts);
+   READ_NODE_FIELD(base.inhRelations);
+   READ_NODE_FIELD(base.constraints);
+   READ_NODE_FIELD(base.options);
+   READ_ENUM_FIELD(base.oncommit,OnCommitAction);
+   READ_STRING_FIELD(base.tablespacename);
+   READ_NODE_FIELD(base.distributedBy);
+   READ_BOOL_FIELD(base.is_part_child);
+   READ_BOOL_FIELD(base.is_add_part);
READ_OID_FIELD(oidInfo.relOid);
READ_OID_FIELD(oidInfo.comptypeOid);
READ_OID_FIELD(oidInfo.toastOid);
@@ -2052,13 +2055,10 @@ _readCreateStmt(const char ** str)
READ_OID_FIELD(oidInfo.aoblkdirOid);
READ_OID_FIELD(oidInfo.aoblkdirIndexOid);
READ_OID_FIELD(oidInfo.aoblkdirComptypeOid);
-   READ_CHAR_FIELD(relKind);
READ_CHAR_FIELD(relStorage);
/* policy omitted */
/* postCreate - for analysis, QD only */
/* 

[1/4] incubator-hawq git commit: HAWQ-1628. Add hdfs protocol for pluggable storage framework [Forced Update!]

2018-08-14 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 1c189fc12 -> 48ff52c9a (forced update)


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/include/cdb/cdbdatalocality.h
--
diff --git a/src/include/cdb/cdbdatalocality.h 
b/src/include/cdb/cdbdatalocality.h
index c3753ce..0afc45c 100644
--- a/src/include/cdb/cdbdatalocality.h
+++ b/src/include/cdb/cdbdatalocality.h
@@ -32,6 +32,7 @@
 #include "catalog/gp_policy.h"
 #include "nodes/parsenodes.h"
 #include "executor/execdesc.h"
+#include "catalog/pg_exttable.h"
 
 /*
  * structure containing information about data residence
@@ -71,12 +72,18 @@ typedef struct VirtualSegmentNode
char *hostname;
 } VirtualSegmentNode;
 
+typedef struct blocklocation_file{
+   BlockLocation *locations;
+   int block_num;
+   char *file_uri;
+}blocklocation_file;
+
 /*
  * calculate_planner_segment_num: based on the parse tree,
  * we calculate the appropriate planner segment_num.
  */
-SplitAllocResult * calculate_planner_segment_num(Query *query, 
QueryResourceLife resourceLife,
-List *rtable, GpPolicy 
*intoPolicy, int sliceNum, int fixedVsegNum);
+SplitAllocResult * calculate_planner_segment_num(PlannedStmt *plannedstmt, 
Query *query,
+   QueryResourceLife resourceLife, int fixedVsegNum);
 
 /*
  * udf_collector_walker: the routine to file udfs.
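The blocklocation_file struct added above bundles the HDFS block placement for one file. Below is a minimal sketch (not from the commit) of how one entry might be assembled; the helper name and the way the BlockLocation array is obtained are assumptions.

#include "postgres.h"
#include "cdb/cdbdatalocality.h"

/* Hypothetical helper: wrap one file's block locations for data locality planning. */
static blocklocation_file *
make_blocklocation_file(const char *file_uri, BlockLocation *locations, int block_num)
{
	blocklocation_file *blf = (blocklocation_file *) palloc0(sizeof(blocklocation_file));

	blf->file_uri  = pstrdup(file_uri);   /* e.g. an hdfs:// path of one table file */
	blf->locations = locations;           /* per-block datanode placement */
	blf->block_num = block_num;

	return blf;
}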

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/include/commands/tablecmds.h
--
diff --git a/src/include/commands/tablecmds.h b/src/include/commands/tablecmds.h
index 8404a7f..e23df2f 100644
--- a/src/include/commands/tablecmds.h
+++ b/src/include/commands/tablecmds.h
@@ -52,7 +52,7 @@ typedef struct AttrMapContext{
 
 extern const char *synthetic_sql;
 
-extern Oid DefineRelation(CreateStmt *stmt, char relkind, char relstorage);
+extern Oid DefineRelation(CreateStmt *stmt, char relkind, char relstorage, 
const char *formattername);
 
 extern voidDefineExternalRelation(CreateExternalStmt *stmt);
 
@@ -89,6 +89,8 @@ extern void CheckTableNotInUse(Relation rel, const char 
*stmt);
 
 extern void ExecuteTruncate(TruncateStmt *stmt);
 
+
+
 extern void renameatt(Oid myrelid,
  const char *oldattname,
  const char *newattname,

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/include/nodes/parsenodes.h
--
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
index a9ca4a0..173a35c 100644
--- a/src/include/nodes/parsenodes.h
+++ b/src/include/nodes/parsenodes.h
@@ -1354,14 +1354,15 @@ typedef struct GrantRoleStmt
  * Node that represents the single row error handling (SREH) clause.
  * used in COPY and External Tables.
  */
-typedef struct SingleRowErrorDesc
-{
-   NodeTag type;
-   RangeVar*errtable;  /* error table for data 
format errors */
-   int rejectlimit;/* per segment error 
reject limit */
-   boolis_keep;/* true if KEEP 
indicated (COPY only) */
-   boolis_limit_in_rows;   /* true for ROWS false for 
PERCENT */
-   boolreusing_existing_errtable;  /* var used later in 
trasform... */
+typedef struct SingleRowErrorDesc {
+  NodeTag type;
+  RangeVar *errtable; /* error table for data format errors */
+  int rejectlimit;/* per segment error reject limit */
+  bool is_keep;   /* true if KEEP indicated (COPY only) */
+  bool is_limit_in_rows;  /* true for ROWS false for PERCENT */
+  bool reusing_existing_errtable; /* var used later in trasform... */
+  bool is_hdfs_protocol_text; /* hdfs protocol text format table */
+  char *hdfsLoc; /* error table location for hdfs protocol text only */
 } SingleRowErrorDesc;
 
 /* --
@@ -1403,37 +1404,40 @@ typedef struct CopyStmt
  * implementation).
  * --
  */
-
-typedef struct CreateStmt
-{
-   NodeTag type;
-   RangeVar   *relation;   /* relation to create */
-   List   *tableElts;  /* column definitions (list of 
ColumnDef) */
-   List   *inhRelations;   /* relations to inherit from (list of
-* inhRelation) 
*/
-   List   *constraints;/* constraints (list of Constraint 
nodes) */
-   List   *options;/* options from WITH clause */
-   OnCommitAction oncommit;/* what do we do at COMMIT? */
-   char   *tablespacename; /* table space to use, or NULL */
-   List   *distributedBy;   /* what columns we distribute the data by 
*/
-   Node   *partitionBy;

[3/4] incubator-hawq git commit: HAWQ-1628. Add hdfs protocol for pluggable storage framework

2018-08-14 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48ff52c9/src/backend/commands/analyze.c
--
diff --git a/src/backend/commands/analyze.c b/src/backend/commands/analyze.c
index ddd9677..ab70e5a 100644
--- a/src/backend/commands/analyze.c
+++ b/src/backend/commands/analyze.c
@@ -54,6 +54,7 @@
 #include "access/aosegfiles.h"
 #include "access/parquetsegfiles.h"
 #include "access/hash.h"
+#include "access/xact.h"
 #include "catalog/index.h"
 #include "catalog/indexing.h"
 #include "catalog/namespace.h"
@@ -64,6 +65,7 @@
 #include "cdb/cdbheap.h"
 #include "cdb/cdbhash.h"
 #include "commands/vacuum.h"
+#include "commands/dbcommands.h"
 #include "executor/executor.h"
 #include "lib/stringinfo.h"
 #include "libpq/pqformat.h" /* pq_beginmessage() etc. */
@@ -80,12 +82,15 @@
 #include "utils/memutils.h"
 #include "utils/syscache.h"
 #include "utils/tuplesort.h"
+#include "utils/palloc.h"
 #include "utils/pg_locale.h"
 #include "utils/builtins.h"
 #include "utils/inval.h"
+#include "utils/uri.h"
 #include "cdb/cdbvars.h"
 #include "cdb/cdbanalyze.h"
 #include "cdb/cdbrelsize.h"
+#include "cdb/cdbdatalocality.h"
 #include "utils/fmgroids.h"
 #include "storage/backendid.h"
 #include "executor/spi.h"
@@ -93,6 +98,8 @@
 #include "catalog/pg_namespace.h"
 #include "utils/debugbreak.h"
 #include "nodes/makefuncs.h"
+#include "nodes/nodes.h"
+#include "nodes/parsenodes.h"
 
 #include "commands/analyzeutils.h"
 
@@ -153,9 +160,15 @@ static List*buildExplicitAttributeNames(Oid 
relationOid, VacuumStmt *stmt);
 static void gp_statistics_estimate_reltuples_relpages_heap(Relation rel, 
float4 *reltuples, float4 *relpages);
 static void gp_statistics_estimate_reltuples_relpages_ao_rows(Relation rel, 
float4 *reltuples, float4 *relpages);
 static void gp_statistics_estimate_reltuples_relpages_parquet(Relation rel, 
float4 *reltuples, float4 *relpages);
+static void gp_statistics_estimate_reltuples_relpages_external(Relation rel, 
float4 *relTuples, float4 *relPages);
 static void analyzeEstimateReltuplesRelpages(Oid relationOid, float4 
*relTuples, float4 *relPages, bool rootonly);
 static void analyzeEstimateIndexpages(Oid relationOid, Oid indexOid, float4 
*indexPages);
 
+static void getExternalRelTuples(Oid relationOid, float4 *relTuples);
+static void getExternalRelPages(Oid relationOid, float4 *relPages , Relation 
rel);
+static float4 getExtrelPagesHDFS(Uri *uri);
+static bool isExternalHDFSProtocol(Oid relOid);
+
 /* Attribute-type related functions */
 static bool isOrderedAndHashable(Oid relationOid, const char *attributeName);
 static bool isBoolType(Oid relationOid, const char *attributeName);
@@ -442,6 +455,17 @@ void analyzeStmt(VacuumStmt *stmt, List *relids, int 
preferred_seg_num)
"Please run ANALYZE on 
the root partition table.",

get_rel_name(relationOid;
}
+   else if (!isExternalHDFSProtocol(relationOid))
+   {
+   /*
+* Support analyze for external table.
+* For now, HDFS protocol external 
table is supported.
+*/
+   ereport(WARNING,
+(errmsg("skipping \"%s\" --- cannot 
analyze external table with non-HDFS or non-MAGMA protocol. "
+"Please run ANALYZE on 
external table with HDFS or MAGMA protocol.",
+get_rel_name(relationOid;
+   }
else
{
lRelOids = list_make1_oid(relationOid);
@@ -989,8 +1013,10 @@ static List* analyzableRelations(bool rootonly, List 
**fullRelOids)
while (HeapTupleIsValid(tuple = caql_getnext(pcqCtx)))
{
Oid candidateOid = HeapTupleGetOid(tuple);
-   if (analyzePermitted(candidateOid)
-   && candidateOid != 
StatisticRelationId)
+   bool isExternalHDFS = isExternalHDFSProtocol(candidateOid);
+   if (analyzePermitted(candidateOid) &&
+   candidateOid != StatisticRelationId &&
+   isExternalHDFS)
{
*fullRelOids = lappend_oid(*fullRelOids, candidateOid);
}
@@ -998,8 +1024,9 @@ static List* analyzableRelations(bool rootonly, List 
**fullRelOids)
{
continue;
}
-   if (analyzePermitted(candidateOid)
-   && candidateOid != StatisticRelationId)
+   if (analyzePermitted(candidateOid) &&
+   candidateOid !
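The ANALYZE changes above gate external tables on their protocol via isExternalHDFSProtocol(). A minimal, hypothetical sketch of the kind of URI check such a gate can reduce to, assuming the commit adds a URI_HDFS tag to utils/uri.h (a header the file list shows it touches); this is not the commit's actual implementation.

#include "postgres.h"
#include "utils/uri.h"   /* ParseExternalTableUri(), FreeExternalTableUri(), Uri */

static bool
location_uses_hdfs_protocol(const char *location)
{
	Uri  *uri;
	bool  is_hdfs;

	if (location == NULL)
		return false;

	uri = ParseExternalTableUri(location);
	is_hdfs = (uri != NULL && uri->protocol == URI_HDFS);   /* URI_HDFS is assumed */

	if (uri != NULL)
		FreeExternalTableUri(uri);

	return is_hdfs;
}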

[2/4] incubator-hawq git commit: Add hdfs protocol for pluggable storage framework

2018-08-13 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/backend/nodes/outfuncs.c
--
diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c
index cf6bf04..5894184 100644
--- a/src/backend/nodes/outfuncs.c
+++ b/src/backend/nodes/outfuncs.c
@@ -2111,15 +2111,18 @@ _outCreateStmt(StringInfo str, CreateStmt *node)
 {
WRITE_NODE_TYPE("CREATESTMT");
 
-   WRITE_NODE_FIELD(relation);
-   WRITE_NODE_FIELD(tableElts);
-   WRITE_NODE_FIELD(inhRelations);
-   WRITE_NODE_FIELD(constraints);
-   WRITE_NODE_FIELD(options);
-   WRITE_ENUM_FIELD(oncommit, OnCommitAction);
-   WRITE_STRING_FIELD(tablespacename);
-   WRITE_NODE_FIELD(distributedBy);
-   WRITE_NODE_FIELD(partitionBy);
+   WRITE_CHAR_FIELD(base.relKind);
+   WRITE_NODE_FIELD(base.relation);
+   WRITE_NODE_FIELD(base.tableElts);
+   WRITE_NODE_FIELD(base.inhRelations);
+   WRITE_NODE_FIELD(base.constraints);
+   WRITE_NODE_FIELD(base.options);
+   WRITE_ENUM_FIELD(base.oncommit, OnCommitAction);
+   WRITE_STRING_FIELD(base.tablespacename);
+   WRITE_NODE_FIELD(base.distributedBy);
+   WRITE_BOOL_FIELD(base.is_part_child);
+   WRITE_BOOL_FIELD(base.is_add_part);
+   WRITE_NODE_FIELD(base.partitionBy);
WRITE_OID_FIELD(oidInfo.relOid);
WRITE_OID_FIELD(oidInfo.comptypeOid);
WRITE_OID_FIELD(oidInfo.toastOid);
@@ -2131,13 +2134,10 @@ _outCreateStmt(StringInfo str, CreateStmt *node)
WRITE_OID_FIELD(oidInfo.aoblkdirOid);
WRITE_OID_FIELD(oidInfo.aoblkdirIndexOid);
WRITE_OID_FIELD(oidInfo.aoblkdirComptypeOid);
-   WRITE_CHAR_FIELD(relKind);
WRITE_CHAR_FIELD(relStorage);
/* policy omitted */
/* postCreate omitted */
WRITE_NODE_FIELD(deferredStmts);
-   WRITE_BOOL_FIELD(is_part_child);
-   WRITE_BOOL_FIELD(is_add_part);
WRITE_BOOL_FIELD(is_split_part);
WRITE_OID_FIELD(ownerid);
WRITE_BOOL_FIELD(buildAoBlkdir);
@@ -2170,16 +2170,27 @@ _outCreateExternalStmt(StringInfo str, 
CreateExternalStmt *node)
 {
WRITE_NODE_TYPE("CREATEEXTERNALSTMT");
 
-   WRITE_NODE_FIELD(relation);
-   WRITE_NODE_FIELD(tableElts);
+   WRITE_CHAR_FIELD(base.relKind);
+   WRITE_NODE_FIELD(base.relation);
+   WRITE_NODE_FIELD(base.tableElts);
+   WRITE_NODE_FIELD(base.inhRelations);
+   WRITE_NODE_FIELD(base.constraints);
+   WRITE_NODE_FIELD(base.options);
+   WRITE_ENUM_FIELD(base.oncommit, OnCommitAction);
+   WRITE_STRING_FIELD(base.tablespacename);
+   WRITE_NODE_FIELD(base.distributedBy);
+   WRITE_BOOL_FIELD(base.is_part_child);
+   WRITE_BOOL_FIELD(base.is_add_part);
+   WRITE_NODE_FIELD(base.partitionBy);
WRITE_NODE_FIELD(exttypedesc);
WRITE_STRING_FIELD(format);
-   WRITE_NODE_FIELD(formatOpts);
WRITE_BOOL_FIELD(isweb);
WRITE_BOOL_FIELD(iswritable);
+   WRITE_BOOL_FIELD(isexternal);
+   WRITE_BOOL_FIELD(forceCreateDir);
+   WRITE_STRING_FIELD(parentPath);
WRITE_NODE_FIELD(sreh);
WRITE_NODE_FIELD(encoding);
-   WRITE_NODE_FIELD(distributedBy);
 }
 
 static void

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/backend/nodes/readfast.c
--
diff --git a/src/backend/nodes/readfast.c b/src/backend/nodes/readfast.c
index f9aee80..2cc7035 100644
--- a/src/backend/nodes/readfast.c
+++ b/src/backend/nodes/readfast.c
@@ -2033,14 +2033,17 @@ _readCreateStmt(const char ** str)
 {
READ_LOCALS(CreateStmt);
 
-   READ_NODE_FIELD(relation);
-   READ_NODE_FIELD(tableElts);
-   READ_NODE_FIELD(inhRelations);
-   READ_NODE_FIELD(constraints);
-   READ_NODE_FIELD(options);
-   READ_ENUM_FIELD(oncommit,OnCommitAction);
-   READ_STRING_FIELD(tablespacename);
-   READ_NODE_FIELD(distributedBy);
+   READ_CHAR_FIELD(base.relKind);
+   READ_NODE_FIELD(base.relation);
+   READ_NODE_FIELD(base.tableElts);
+   READ_NODE_FIELD(base.inhRelations);
+   READ_NODE_FIELD(base.constraints);
+   READ_NODE_FIELD(base.options);
+   READ_ENUM_FIELD(base.oncommit,OnCommitAction);
+   READ_STRING_FIELD(base.tablespacename);
+   READ_NODE_FIELD(base.distributedBy);
+   READ_BOOL_FIELD(base.is_part_child);
+   READ_BOOL_FIELD(base.is_add_part);
READ_OID_FIELD(oidInfo.relOid);
READ_OID_FIELD(oidInfo.comptypeOid);
READ_OID_FIELD(oidInfo.toastOid);
@@ -2052,13 +2055,10 @@ _readCreateStmt(const char ** str)
READ_OID_FIELD(oidInfo.aoblkdirOid);
READ_OID_FIELD(oidInfo.aoblkdirIndexOid);
READ_OID_FIELD(oidInfo.aoblkdirComptypeOid);
-   READ_CHAR_FIELD(relKind);
READ_CHAR_FIELD(relStorage);
/* policy omitted */
/* postCreate - for analysis, QD only */
/* 

[3/4] incubator-hawq git commit: Add hdfs protocol for pluggable storage framework

2018-08-13 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/backend/commands/analyze.c
--
diff --git a/src/backend/commands/analyze.c b/src/backend/commands/analyze.c
index ddd9677..ab70e5a 100644
--- a/src/backend/commands/analyze.c
+++ b/src/backend/commands/analyze.c
@@ -54,6 +54,7 @@
 #include "access/aosegfiles.h"
 #include "access/parquetsegfiles.h"
 #include "access/hash.h"
+#include "access/xact.h"
 #include "catalog/index.h"
 #include "catalog/indexing.h"
 #include "catalog/namespace.h"
@@ -64,6 +65,7 @@
 #include "cdb/cdbheap.h"
 #include "cdb/cdbhash.h"
 #include "commands/vacuum.h"
+#include "commands/dbcommands.h"
 #include "executor/executor.h"
 #include "lib/stringinfo.h"
 #include "libpq/pqformat.h" /* pq_beginmessage() etc. */
@@ -80,12 +82,15 @@
 #include "utils/memutils.h"
 #include "utils/syscache.h"
 #include "utils/tuplesort.h"
+#include "utils/palloc.h"
 #include "utils/pg_locale.h"
 #include "utils/builtins.h"
 #include "utils/inval.h"
+#include "utils/uri.h"
 #include "cdb/cdbvars.h"
 #include "cdb/cdbanalyze.h"
 #include "cdb/cdbrelsize.h"
+#include "cdb/cdbdatalocality.h"
 #include "utils/fmgroids.h"
 #include "storage/backendid.h"
 #include "executor/spi.h"
@@ -93,6 +98,8 @@
 #include "catalog/pg_namespace.h"
 #include "utils/debugbreak.h"
 #include "nodes/makefuncs.h"
+#include "nodes/nodes.h"
+#include "nodes/parsenodes.h"
 
 #include "commands/analyzeutils.h"
 
@@ -153,9 +160,15 @@ static List*buildExplicitAttributeNames(Oid 
relationOid, VacuumStmt *stmt);
 static void gp_statistics_estimate_reltuples_relpages_heap(Relation rel, 
float4 *reltuples, float4 *relpages);
 static void gp_statistics_estimate_reltuples_relpages_ao_rows(Relation rel, 
float4 *reltuples, float4 *relpages);
 static void gp_statistics_estimate_reltuples_relpages_parquet(Relation rel, 
float4 *reltuples, float4 *relpages);
+static void gp_statistics_estimate_reltuples_relpages_external(Relation rel, 
float4 *relTuples, float4 *relPages);
 static void analyzeEstimateReltuplesRelpages(Oid relationOid, float4 
*relTuples, float4 *relPages, bool rootonly);
 static void analyzeEstimateIndexpages(Oid relationOid, Oid indexOid, float4 
*indexPages);
 
+static void getExternalRelTuples(Oid relationOid, float4 *relTuples);
+static void getExternalRelPages(Oid relationOid, float4 *relPages , Relation 
rel);
+static float4 getExtrelPagesHDFS(Uri *uri);
+static bool isExternalHDFSProtocol(Oid relOid);
+
 /* Attribute-type related functions */
 static bool isOrderedAndHashable(Oid relationOid, const char *attributeName);
 static bool isBoolType(Oid relationOid, const char *attributeName);
@@ -442,6 +455,17 @@ void analyzeStmt(VacuumStmt *stmt, List *relids, int 
preferred_seg_num)
"Please run ANALYZE on 
the root partition table.",

get_rel_name(relationOid;
}
+   else if (!isExternalHDFSProtocol(relationOid))
+   {
+   /*
+* Support analyze for external table.
+* For now, HDFS protocol external 
table is supported.
+*/
+   ereport(WARNING,
+(errmsg("skipping \"%s\" --- cannot 
analyze external table with non-HDFS or non-MAGMA protocol. "
+"Please run ANALYZE on 
external table with HDFS or MAGMA protocol.",
+get_rel_name(relationOid;
+   }
else
{
lRelOids = list_make1_oid(relationOid);
@@ -989,8 +1013,10 @@ static List* analyzableRelations(bool rootonly, List 
**fullRelOids)
while (HeapTupleIsValid(tuple = caql_getnext(pcqCtx)))
{
Oid candidateOid = HeapTupleGetOid(tuple);
-   if (analyzePermitted(candidateOid)
-   && candidateOid != 
StatisticRelationId)
+   bool isExternalHDFS = isExternalHDFSProtocol(candidateOid);
+   if (analyzePermitted(candidateOid) &&
+   candidateOid != StatisticRelationId &&
+   isExternalHDFS)
{
*fullRelOids = lappend_oid(*fullRelOids, candidateOid);
}
@@ -998,8 +1024,9 @@ static List* analyzableRelations(bool rootonly, List 
**fullRelOids)
{
continue;
}
-   if (analyzePermitted(candidateOid)
-   && candidateOid != StatisticRelationId)
+   if (analyzePermitted(candidateOid) &&
+   candidateOid !

[4/4] incubator-hawq git commit: Add hdfs protocol for pluggable storage framework

2018-08-13 Thread huor
Add hdfs protocol for pluggable storage framework


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/1c189fc1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/1c189fc1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/1c189fc1

Branch: refs/heads/master
Commit: 1c189fc12bf357bed36a4ef85d973a49eb26cd28
Parents: 9f33d8d
Author: oushu1wangziming1 
Authored: Tue Jul 17 15:43:36 2018 +0800
Committer: Ruilong Huo 
Committed: Tue Aug 14 10:14:46 2018 +0800

--
 contrib/Makefile|1 +
 contrib/exthdfs/Makefile|   34 +
 contrib/exthdfs/common.h|   38 +
 contrib/exthdfs/exthdfs.c   |  469 
 contrib/extprotocol/gpextprotocol.c |2 +-
 src/backend/access/external/fileam.c|  740 
 src/backend/access/external/plugstorage.c   |   30 +-
 src/backend/catalog/cdb_external_extensions.sql |   12 +
 src/backend/catalog/heap.c  |   79 +-
 src/backend/cdb/cdbdatalocality.c   |  434 ++-
 src/backend/cdb/cdbpartition.c  |   24 +-
 src/backend/commands/analyze.c  |  417 ++-
 src/backend/commands/copy.c |   22 +-
 src/backend/commands/indexcmds.c|1 +
 src/backend/commands/sequence.c |   22 +-
 src/backend/commands/tablecmds.c|  870 +++---
 src/backend/commands/typecmds.c |   19 +-
 src/backend/commands/user.c |   35 +-
 src/backend/commands/view.c |   19 +-
 src/backend/executor/execDML.c  |   20 +-
 src/backend/nodes/copyfuncs.c   |   40 +-
 src/backend/nodes/equalfuncs.c  |   39 +-
 src/backend/nodes/outfast.c |   40 +-
 src/backend/nodes/outfuncs.c|   43 +-
 src/backend/nodes/readfast.c|   60 +-
 src/backend/nodes/readfuncs.c   |   44 +-
 src/backend/optimizer/plan/createplan.c |   44 +-
 src/backend/optimizer/plan/planner.c|   10 +-
 src/backend/parser/analyze.c| 1098 +-
 src/backend/parser/gram.y   |  178 ++-
 src/backend/tcop/utility.c  |   48 +-
 src/backend/utils/misc/uriparser.c  |   10 +-
 src/include/access/fileam.h |   13 +-
 src/include/access/formatter.h  |   10 +
 src/include/access/plugstorage.h|6 +-
 src/include/catalog/pg_exttable.h   |7 +-
 src/include/cdb/cdbdatalocality.h   |   11 +-
 src/include/commands/tablecmds.h|4 +-
 src/include/nodes/parsenodes.h  |  118 +-
 src/include/parser/analyze.h|   50 +-
 src/include/utils/uri.h |5 +-
 41 files changed, 4016 insertions(+), 1150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/contrib/Makefile
--
diff --git a/contrib/Makefile b/contrib/Makefile
index 695e92a..e5daff9 100644
--- a/contrib/Makefile
+++ b/contrib/Makefile
@@ -9,6 +9,7 @@ WANTED_DIRS = \
extprotocol \
gp_cancel_query \
formatter_fixedwidth \
+   exthdfs\
hawq-hadoop
 
 ifeq ($(with_pgcrypto), yes)

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/contrib/exthdfs/Makefile
--
diff --git a/contrib/exthdfs/Makefile b/contrib/exthdfs/Makefile
new file mode 100644
index 000..e247664
--- /dev/null
+++ b/contrib/exthdfs/Makefile
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+MODULE_big = exthdfs
+OBJS   = exthdfs.o
+
+PG_CPPFLAGS = -I$(libpq_srcdir)
+PG_LIBS = $(libpq_pgport)
+
+over

[1/4] incubator-hawq git commit: Add hdfs protocol for pluggable storage framework

2018-08-13 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 9f33d8dd0 -> 1c189fc12


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/include/cdb/cdbdatalocality.h
--
diff --git a/src/include/cdb/cdbdatalocality.h 
b/src/include/cdb/cdbdatalocality.h
index c3753ce..0afc45c 100644
--- a/src/include/cdb/cdbdatalocality.h
+++ b/src/include/cdb/cdbdatalocality.h
@@ -32,6 +32,7 @@
 #include "catalog/gp_policy.h"
 #include "nodes/parsenodes.h"
 #include "executor/execdesc.h"
+#include "catalog/pg_exttable.h"
 
 /*
  * structure containing information about data residence
@@ -71,12 +72,18 @@ typedef struct VirtualSegmentNode
char *hostname;
 } VirtualSegmentNode;
 
+typedef struct blocklocation_file{
+   BlockLocation *locations;
+   int block_num;
+   char *file_uri;
+}blocklocation_file;
+
 /*
  * calculate_planner_segment_num: based on the parse tree,
  * we calculate the appropriate planner segment_num.
  */
-SplitAllocResult * calculate_planner_segment_num(Query *query, 
QueryResourceLife resourceLife,
-List *rtable, GpPolicy 
*intoPolicy, int sliceNum, int fixedVsegNum);
+SplitAllocResult * calculate_planner_segment_num(PlannedStmt *plannedstmt, 
Query *query,
+   QueryResourceLife resourceLife, int fixedVsegNum);
 
 /*
  * udf_collector_walker: the routine to file udfs.

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/include/commands/tablecmds.h
--
diff --git a/src/include/commands/tablecmds.h b/src/include/commands/tablecmds.h
index 8404a7f..e23df2f 100644
--- a/src/include/commands/tablecmds.h
+++ b/src/include/commands/tablecmds.h
@@ -52,7 +52,7 @@ typedef struct AttrMapContext{
 
 extern const char *synthetic_sql;
 
-extern Oid DefineRelation(CreateStmt *stmt, char relkind, char relstorage);
+extern Oid DefineRelation(CreateStmt *stmt, char relkind, char relstorage, 
const char *formattername);
 
 extern voidDefineExternalRelation(CreateExternalStmt *stmt);
 
@@ -89,6 +89,8 @@ extern void CheckTableNotInUse(Relation rel, const char 
*stmt);
 
 extern void ExecuteTruncate(TruncateStmt *stmt);
 
+
+
 extern void renameatt(Oid myrelid,
  const char *oldattname,
  const char *newattname,

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1c189fc1/src/include/nodes/parsenodes.h
--
diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h
index a9ca4a0..173a35c 100644
--- a/src/include/nodes/parsenodes.h
+++ b/src/include/nodes/parsenodes.h
@@ -1354,14 +1354,15 @@ typedef struct GrantRoleStmt
  * Node that represents the single row error handling (SREH) clause.
  * used in COPY and External Tables.
  */
-typedef struct SingleRowErrorDesc
-{
-   NodeTag type;
-   RangeVar*errtable;  /* error table for data 
format errors */
-   int rejectlimit;/* per segment error 
reject limit */
-   boolis_keep;/* true if KEEP 
indicated (COPY only) */
-   boolis_limit_in_rows;   /* true for ROWS false for 
PERCENT */
-   boolreusing_existing_errtable;  /* var used later in 
trasform... */
+typedef struct SingleRowErrorDesc {
+  NodeTag type;
+  RangeVar *errtable; /* error table for data format errors */
+  int rejectlimit;/* per segment error reject limit */
+  bool is_keep;   /* true if KEEP indicated (COPY only) */
+  bool is_limit_in_rows;  /* true for ROWS false for PERCENT */
+  bool reusing_existing_errtable; /* var used later in trasform... */
+  bool is_hdfs_protocol_text; /* hdfs protocol text format table */
+  char *hdfsLoc; /* error table location for hdfs protocol text only */
 } SingleRowErrorDesc;
 
 /* --
@@ -1403,37 +1404,40 @@ typedef struct CopyStmt
  * implementation).
  * --
  */
-
-typedef struct CreateStmt
-{
-   NodeTag type;
-   RangeVar   *relation;   /* relation to create */
-   List   *tableElts;  /* column definitions (list of 
ColumnDef) */
-   List   *inhRelations;   /* relations to inherit from (list of
-* inhRelation) 
*/
-   List   *constraints;/* constraints (list of Constraint 
nodes) */
-   List   *options;/* options from WITH clause */
-   OnCommitAction oncommit;/* what do we do at COMMIT? */
-   char   *tablespacename; /* table space to use, or NULL */
-   List   *distributedBy;   /* what columns we distribute the data by 
*/
-   Node   *partitionBy;

incubator-hawq git commit: HAWQ-1636. Fix compile apache hawq failure due to unsupported syntax in libyarn on osx 10.11

2018-07-04 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 21991077c -> 845ffe875


HAWQ-1636. Fix compile apache hawq failure due to unsupported syntax in libyarn 
on osx 10.11


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/845ffe87
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/845ffe87
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/845ffe87

Branch: refs/heads/master
Commit: 845ffe87576587d4591894a77da531cef5e85fb3
Parents: 2199107
Author: oushu1wangziming1 
Authored: Thu Jul 5 10:12:27 2018 +0800
Committer: oushu1wangziming1 
Committed: Thu Jul 5 10:13:05 2018 +0800

--
 depends/libyarn/src/libyarnclient/ApplicationClient.cpp | 4 ++--
 depends/libyarn/src/libyarnclient/ApplicationMaster.cpp | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/845ffe87/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
--
diff --git a/depends/libyarn/src/libyarnclient/ApplicationClient.cpp 
b/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
index 819514f..b307624 100644
--- a/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
+++ b/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
@@ -73,10 +73,10 @@ ApplicationClient::ApplicationClient(string &user, string 
&host, string &port) {
 rmConfInfos = RMInfo::getHARMInfo(*conf, YARN_RESOURCEMANAGER_HA);
 
 /* build a list of candidate RMs without duplicate */
-for (vector<RMInfo>::iterator it = rmConfInfos.begin();
+for (std::vector<RMInfo>::iterator it = rmConfInfos.begin();
 it != rmConfInfos.end(); it++) {
 bool found = false;
-for (vector<RMInfo>::iterator it2 = rmInfos.begin();
+for (std::vector<RMInfo>::iterator it2 = rmInfos.begin();
 it2 != rmInfos.end(); it2++) {
 if (it2->getHost() == it->getHost()
 && it2->getPort() == it->getPort()) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/845ffe87/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
--
diff --git a/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp 
b/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
index 964ac0e..743de86 100644
--- a/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
+++ b/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
@@ -44,10 +44,10 @@ ApplicationMaster::ApplicationMaster(string &schedHost, 
string &schedPort,
 rmConfInfos = RMInfo::getHARMInfo(*conf, 
YARN_RESOURCEMANAGER_SCHEDULER_HA);
 
 /* build a list of candidate RMs without duplicate */
-for (vector<RMInfo>::iterator it = rmConfInfos.begin();
+for (std::vector<RMInfo>::iterator it = rmConfInfos.begin();
 it != rmConfInfos.end(); it++) {
 bool found = false;
-for (vector<RMInfo>::iterator it2 = rmInfos.begin();
+for (std::vector<RMInfo>::iterator it2 = rmInfos.begin();
 it2 != rmInfos.end(); it2++) {
 if (it2->getHost() == it->getHost()
 && it2->getPort() == it->getPort()) {



incubator-hawq git commit: HAWQ-1636. Fix compile apache hawq failure due to unsupported syntax in libyarn on osx 10.11

2018-07-04 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1636 [created] 845ffe875


HAWQ-1636. Fix compile apache hawq failure due to unsupported syntax in libyarn 
on osx 10.11


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/845ffe87
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/845ffe87
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/845ffe87

Branch: refs/heads/HAWQ-1636
Commit: 845ffe87576587d4591894a77da531cef5e85fb3
Parents: 2199107
Author: oushu1wangziming1 
Authored: Thu Jul 5 10:12:27 2018 +0800
Committer: oushu1wangziming1 
Committed: Thu Jul 5 10:13:05 2018 +0800

--
 depends/libyarn/src/libyarnclient/ApplicationClient.cpp | 4 ++--
 depends/libyarn/src/libyarnclient/ApplicationMaster.cpp | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/845ffe87/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
--
diff --git a/depends/libyarn/src/libyarnclient/ApplicationClient.cpp 
b/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
index 819514f..b307624 100644
--- a/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
+++ b/depends/libyarn/src/libyarnclient/ApplicationClient.cpp
@@ -73,10 +73,10 @@ ApplicationClient::ApplicationClient(string &user, string 
&host, string &port) {
 rmConfInfos = RMInfo::getHARMInfo(*conf, YARN_RESOURCEMANAGER_HA);
 
 /* build a list of candidate RMs without duplicate */
-for (vector<RMInfo>::iterator it = rmConfInfos.begin();
+for (std::vector<RMInfo>::iterator it = rmConfInfos.begin();
 it != rmConfInfos.end(); it++) {
 bool found = false;
-for (vector<RMInfo>::iterator it2 = rmInfos.begin();
+for (std::vector<RMInfo>::iterator it2 = rmInfos.begin();
 it2 != rmInfos.end(); it2++) {
 if (it2->getHost() == it->getHost()
 && it2->getPort() == it->getPort()) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/845ffe87/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
--
diff --git a/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp 
b/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
index 964ac0e..743de86 100644
--- a/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
+++ b/depends/libyarn/src/libyarnclient/ApplicationMaster.cpp
@@ -44,10 +44,10 @@ ApplicationMaster::ApplicationMaster(string &schedHost, 
string &schedPort,
 rmConfInfos = RMInfo::getHARMInfo(*conf, 
YARN_RESOURCEMANAGER_SCHEDULER_HA);
 
 /* build a list of candidate RMs without duplicate */
-for (vector<RMInfo>::iterator it = rmConfInfos.begin();
+for (std::vector<RMInfo>::iterator it = rmConfInfos.begin();
 it != rmConfInfos.end(); it++) {
 bool found = false;
-for (vector<RMInfo>::iterator it2 = rmInfos.begin();
+for (std::vector<RMInfo>::iterator it2 = rmInfos.begin();
 it2 != rmInfos.end(); it2++) {
 if (it2->getHost() == it->getHost()
 && it2->getPort() == it->getPort()) {



[incubator-hawq] Git Push Summary

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1590 [deleted] 8248bf950


[incubator-hawq] Git Push Summary

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1589 [deleted] 6387bcf04


incubator-hawq git commit: HAWQ-1590. bump hawq version to 2.3 in contrib/hawq-ambari-plugin/build.properties for Apache HAWQ 2.3.0.0-incubating Release

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 6387bcf04 -> a0d45b8e1


HAWQ-1590. bump hawq version to 2.3 in 
contrib/hawq-ambari-plugin/build.properties for Apache HAWQ 2.3.0.0-incubating 
Release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/a0d45b8e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/a0d45b8e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/a0d45b8e

Branch: refs/heads/master
Commit: a0d45b8e12a28602f1329c0875bc26430b79bc68
Parents: 6387bcf
Author: Ruilong Huo 
Authored: Wed Feb 21 10:34:01 2018 +0800
Committer: Ruilong Huo 
Committed: Wed Feb 21 12:37:23 2018 +0800

--
 contrib/hawq-ambari-plugin/build.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a0d45b8e/contrib/hawq-ambari-plugin/build.properties
--
diff --git a/contrib/hawq-ambari-plugin/build.properties 
b/contrib/hawq-ambari-plugin/build.properties
index 6df37f7..e9b092f 100644
--- a/contrib/hawq-ambari-plugin/build.properties
+++ b/contrib/hawq-ambari-plugin/build.properties
@@ -1,4 +1,4 @@
-hawq.release.version=2.2.0
+hawq.release.version=2.3.0
 hawq.common.services.version=2.0.0
 pxf.release.version=3.2.1
 pxf.common.services.version=3.0.0



incubator-hawq git commit: HAWQ-1589. bump hawq version to 2.3 in pom.xml for Apache HAWQ 2.3.0.0-incubating Release

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 5feb8c08e -> 6387bcf04


HAWQ-1589. bump hawq version to 2.3 in pom.xml for Apache HAWQ 
2.3.0.0-incubating Release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/6387bcf0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/6387bcf0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/6387bcf0

Branch: refs/heads/master
Commit: 6387bcf04e1c8daf8f3b1622c58e324131f22162
Parents: 5feb8c0
Author: Ruilong Huo 
Authored: Tue Feb 20 21:09:54 2018 +0800
Committer: Ruilong Huo 
Committed: Tue Feb 20 21:12:26 2018 +0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6387bcf0/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 872aaa7..0c197da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@
  
   <groupId>org.apache.hawq</groupId>
   <artifactId>hawq</artifactId>
-  <version>2.2</version>
+  <version>2.3</version>
   <packaging>pom</packaging>
 
 



incubator-hawq git commit: HAWQ-1590. bump hawq version to 2.3 in contrib/hawq-ambari-plugin/build.properties for Apache HAWQ 2.3.0.0-incubating Release

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1590 [created] 8248bf950


HAWQ-1590. bump hawq version to 2.3 in 
contrib/hawq-ambari-plugin/build.properties for Apache HAWQ 2.3.0.0-incubating 
Release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/8248bf95
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/8248bf95
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/8248bf95

Branch: refs/heads/HAWQ-1590
Commit: 8248bf950373d2bd05b237205cb191c55ac2a4e7
Parents: 5feb8c0
Author: Ruilong Huo 
Authored: Wed Feb 21 10:34:01 2018 +0800
Committer: Ruilong Huo 
Committed: Wed Feb 21 10:34:01 2018 +0800

--
 contrib/hawq-ambari-plugin/build.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8248bf95/contrib/hawq-ambari-plugin/build.properties
--
diff --git a/contrib/hawq-ambari-plugin/build.properties 
b/contrib/hawq-ambari-plugin/build.properties
index 6df37f7..e9b092f 100644
--- a/contrib/hawq-ambari-plugin/build.properties
+++ b/contrib/hawq-ambari-plugin/build.properties
@@ -1,4 +1,4 @@
-hawq.release.version=2.2.0
+hawq.release.version=2.3.0
 hawq.common.services.version=2.0.0
 pxf.release.version=3.2.1
 pxf.common.services.version=3.0.0



incubator-hawq git commit: HAWQ-1589. bump hawq version to 2.3 in pom.xml for Apache HAWQ 2.3.0.0-incubating Release [Forced Update!]

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1589 7ca97a370 -> 6387bcf04 (forced update)


HAWQ-1589. bump hawq version to 2.3 in pom.xml for Apache HAWQ 
2.3.0.0-incubating Release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/6387bcf0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/6387bcf0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/6387bcf0

Branch: refs/heads/HAWQ-1589
Commit: 6387bcf04e1c8daf8f3b1622c58e324131f22162
Parents: 5feb8c0
Author: Ruilong Huo 
Authored: Tue Feb 20 21:09:54 2018 +0800
Committer: Ruilong Huo 
Committed: Tue Feb 20 21:12:26 2018 +0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6387bcf0/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 872aaa7..0c197da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@
  
   <groupId>org.apache.hawq</groupId>
   <artifactId>hawq</artifactId>
-  <version>2.2</version>
+  <version>2.3</version>
   <packaging>pom</packaging>
 
 



incubator-hawq git commit: bump hawq version to 2.3 in pom.xml for Apache HAWQ 2.3.0.0-incubating Release

2018-02-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-1589 [created] 7ca97a370


bump hawq version to 2.3 in pom.xml for Apache HAWQ 2.3.0.0-incubating Release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7ca97a37
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7ca97a37
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7ca97a37

Branch: refs/heads/HAWQ-1589
Commit: 7ca97a370764a8e0b7da05131ce1512a2d780750
Parents: 5feb8c0
Author: Ruilong Huo 
Authored: Tue Feb 20 21:09:54 2018 +0800
Committer: Ruilong Huo 
Committed: Tue Feb 20 21:09:54 2018 +0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7ca97a37/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 872aaa7..0c197da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@
  
   <groupId>org.apache.hawq</groupId>
   <artifactId>hawq</artifactId>
-  <version>2.2</version>
+  <version>2.3</version>
   <packaging>pom</packaging>
 
 



incubator-hawq git commit: HAWQ-1566. Include Pluggable Storage Format Framework in External Table Insert

2018-01-01 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 76e38c53b -> 260750758


HAWQ-1566. Include Pluggable Storage Format Framework in External Table Insert

Add the features related to external table insert and COPY FROM (writing into an external table).


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/26075075
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/26075075
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/26075075

Branch: refs/heads/master
Commit: 260750758e33177b4b94ae020441d33c9e8373e6
Parents: 76e38c5
Author: Chiyang Wan 
Authored: Tue Jan 2 09:23:16 2018 +0800
Committer: Chiyang Wan 
Committed: Tue Jan 2 09:25:30 2018 +0800

--
 src/backend/access/external/fileam.c   |  24 +++--
 src/backend/commands/copy.c| 130 +---
 src/backend/executor/execDML.c |  96 ++--
 src/backend/executor/execMain.c|  33 ++-
 src/include/access/fileam.h|  17 +++-
 src/include/access/plugstorage.h   |  33 +--
 src/include/access/plugstorage_utils.h |  33 +++
 7 files changed, 302 insertions(+), 64 deletions(-)
--
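In the fileam.c diff below, external_insert_init() now carries the pluggable formatter type and name, and external_insert() consumes a TupleTableSlot instead of a HeapTuple. A hedged caller sketch under those signatures; the relation, slot, and formatter-name setup are assumptions, not code from this commit.

#include "postgres.h"
#include "access/fileam.h"       /* external_insert_init/insert/finish */
#include "executor/tuptable.h"   /* TupleTableSlot */

/* Hypothetical helper: write one slot into a writable external table. */
static void
insert_one_slot_into_external_table(Relation rel, TupleTableSlot *slot,
                                    ExternalTableType fmtType, const char *fmtName)
{
	ExternalInsertDesc extInsertDesc;

	/*
	 * errAosegno = 0 is purely illustrative; the formatter name is passed as a
	 * palloc'd copy because external_insert_finish() pfrees it (see the diff).
	 */
	extInsertDesc = external_insert_init(rel, 0, fmtType, pstrdup(fmtName));

	(void) external_insert(extInsertDesc, slot);   /* returns the row's Oid */

	external_insert_finish(extInsertDesc);
}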


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/26075075/src/backend/access/external/fileam.c
--
diff --git a/src/backend/access/external/fileam.c 
b/src/backend/access/external/fileam.c
index 7b77edd..692a5db 100644
--- a/src/backend/access/external/fileam.c
+++ b/src/backend/access/external/fileam.c
@@ -574,7 +574,8 @@ external_getnext(FileScanDesc scan,
  * this function to initialize our various structures and state..
  */
 ExternalInsertDesc
-external_insert_init(Relation rel, int errAosegno)
+external_insert_init(Relation rel, int errAosegno,
+ ExternalTableType formatterType, char *formatterName)
 {
ExternalInsertDesc  extInsertDesc;
ExtTableEntry*  extentry;
@@ -592,6 +593,8 @@ external_insert_init(Relation rel, int errAosegno)
extInsertDesc->ext_rel = rel;
extInsertDesc->ext_noop = (Gp_role == GP_ROLE_DISPATCH);
extInsertDesc->ext_formatter_data = NULL;
+   extInsertDesc->ext_formatter_type = formatterType;
+   extInsertDesc->ext_formatter_name = formatterName;
 
if(extentry->command)
{
@@ -682,9 +685,10 @@ external_insert_init(Relation rel, int errAosegno)
  *
  */
 Oid
-external_insert(ExternalInsertDesc extInsertDesc, HeapTuple instup)
+external_insert(ExternalInsertDesc extInsertDesc, TupleTableSlot *tupTableSlot)
 {
 
+   HeapTuple   instup = ExecFetchSlotHeapTuple(tupTableSlot);
TupleDesc   tupDesc = extInsertDesc->ext_tupDesc;
Datum*  values = extInsertDesc->ext_values;
bool*   nulls = extInsertDesc->ext_nulls;
@@ -755,12 +759,15 @@ external_insert(ExternalInsertDesc extInsertDesc, 
HeapTuple instup)
CopyOneCustomRowTo(pstate, b);
}
 
-   /* Write the data into the external source */
-   external_senddata((URL_FILE*)extInsertDesc->ext_file, pstate);
+   if (extInsertDesc->ext_formatter_data == NULL)
+   {
+   /* Write the data into the external source */
+   external_senddata((URL_FILE*)extInsertDesc->ext_file, pstate);
 
-   /* Reset our buffer to start clean next round */
-   pstate->fe_msgbuf->len = 0;
-   pstate->fe_msgbuf->data[0] = '\0';
+   /* Reset our buffer to start clean next round */
+   pstate->fe_msgbuf->len = 0;
+   pstate->fe_msgbuf->data[0] = '\0';
+   }
pstate->processed++;
 
return HeapTupleGetOid(instup);
@@ -792,6 +799,9 @@ external_insert_finish(ExternalInsertDesc extInsertDesc)
if(extInsertDesc->ext_formatter_data)
pfree(extInsertDesc->ext_formatter_data);
 
+   if(extInsertDesc->ext_formatter_name)
+   pfree(extInsertDesc->ext_formatter_name);
+
pfree(extInsertDesc);
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/26075075/src/backend/commands/copy.c
--
diff --git a/src/backend/commands/copy.c b/src/backend/commands/copy.c
index d500d11..6e08bf0 100644
--- a/src/backend/commands/copy.c
+++ b/src/backend/commands/copy.c
@@ -32,6 +32,7 @@
  *-
  */
 #include "postgres.h"
+#include "fmgr.h"
 
 #include 
 #include 
@@ -46,10 +47,12 @@
 #include "access/aosegfiles.h"
 #include "access/appendonlywriter.h"
 #include "access/xact.h"
+#include "access/plugstorage.h"
 #include "catalog/gp_policy.h"
 #include "catalog/namespace.h"
 #inc

incubator-hawq git commit: HAWQ-1565. Include Pluggable Storage Format Framework in External Table Scan

2017-12-27 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 85fd30570 -> 76e38c53b


HAWQ-1565. Include Pluggable Storage Format Framework in External Table Scan

Rewrite the tuple construction and consumption workflow in the external table scan, which previously incurred data copy cost.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/76e38c53
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/76e38c53
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/76e38c53

Branch: refs/heads/master
Commit: 76e38c53b9377a055e6a2db6f63dc2e984c25025
Parents: 85fd305
Author: Chiyang Wan 
Authored: Thu Dec 7 23:05:40 2017 +0800
Committer: Ruilong Huo 
Committed: Thu Dec 28 14:58:21 2017 +0800

--
 src/backend/access/external/fileam.c|  83 +++---
 src/backend/executor/nodeExternalscan.c | 228 +++
 src/include/access/fileam.h |  18 ++-
 3 files changed, 267 insertions(+), 62 deletions(-)
--
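In the diff below, the scan parameters (uriList, fmtOpts, reject limit, and so on) now travel inside the ExternalScan plan node, so external_beginscan() takes the node itself plus the formatter type and name resolved beforehand. A hedged sketch of a caller under that signature; how segFileInfo and the formatter name are obtained, and the row-fetch loop itself, are elided assumptions.

#include "postgres.h"
#include "access/fileam.h"   /* external_beginscan()/external_endscan() */

/* Hypothetical wrapper around the new begin/end scan entry points. */
static void
begin_and_end_external_scan(ExternalScan *extScan, Relation rel,
                            ResultRelSegFileInfo *segFileInfo,
                            int formatterType, char *formatterName)
{
	FileScanDesc scan;

	scan = external_beginscan(extScan, rel, segFileInfo,
	                          formatterType, formatterName);

	/* ... fetch rows through the external scan node (see nodeExternalscan.c) ... */

	external_endscan(scan);
}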


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/76e38c53/src/backend/access/external/fileam.c
--
diff --git a/src/backend/access/external/fileam.c 
b/src/backend/access/external/fileam.c
index 6a59b95..7b77edd 100644
--- a/src/backend/access/external/fileam.c
+++ b/src/backend/access/external/fileam.c
@@ -137,11 +137,24 @@ static FILE *g_dataSource = NULL;
 * 
 */
 FileScanDesc
-external_beginscan(Relation relation, Index scanrelid, uint32 scancounter,
-  List *uriList, List *fmtOpts, char fmtType, bool 
isMasterOnly,
-  int rejLimit, bool rejLimitInRows, Oid fmterrtbl, 
ResultRelSegFileInfo *segfileinfo, int encoding,
-  List *scanquals)
+external_beginscan(ExternalScan *extScan,
+   Relation relation,
+   ResultRelSegFileInfo *segFileInfo,
+   int formatterType,
+   char *formatterName)
 {
+   Index scanrelid = extScan->scan.scanrelid;
+   uint32 scancounter = extScan->scancounter;
+   List *uriList = extScan->uriList;
+   List *fmtOpts = extScan->fmtOpts;
+   char fmtType = extScan->fmtType;
+   bool isMasterOnly = extScan->isMasterOnly;
+   int rejLimit = extScan->rejLimit;
+   bool rejLimitInRows = extScan->rejLimitInRows;
+   Oid fmterrtbl = extScan->fmterrtbl;
+   int encoding = extScan->encoding;
+   List *scanquals = extScan->scan.plan.qual;
+
FileScanDesc scan;
TupleDesc   tupDesc = NULL;
int attnum;
@@ -174,6 +187,9 @@ external_beginscan(Relation relation, Index scanrelid, 
uint32 scancounter,
scan->fs_file = NULL;
scan->fs_formatter = NULL;
 
+   scan->fs_formatter_type = formatterType;
+   scan->fs_formatter_name = formatterName;
+
/*
 * get the external URI assigned to us.
 *
@@ -229,6 +245,7 @@ external_beginscan(Relation relation, Index scanrelid, 
uint32 scancounter,
/* set external source (uri) */
scan->fs_uri = uri;
 
+   elog(LOG, "fs_uri (%d) is set as %s", segindex, uri);
/* NOTE: we delay actually opening the data source until 
external_getnext() */
}
else
@@ -272,14 +289,15 @@ external_beginscan(Relation relation, Index scanrelid, 
uint32 scancounter,
 
/* Initialize all the parsing and state variables */
InitParseState(scan->fs_pstate, relation, NULL, NULL, false, fmtOpts, 
fmtType,
-  scan->fs_uri, rejLimit, rejLimitInRows, 
fmterrtbl, segfileinfo, encoding);
+  scan->fs_uri, rejLimit, rejLimitInRows, fmterrtbl, 
segFileInfo, encoding);
 
-   if(fmttype_is_custom(fmtType))
-   {
-   scan->fs_formatter = (FormatterData *) palloc0 
(sizeof(FormatterData));
-   initStringInfo(&scan->fs_formatter->fmt_databuf);
-   scan->fs_formatter->fmt_perrow_ctx = 
scan->fs_pstate->rowcontext;
-   }
+   /*
+* We always have custom formatter
+*/
+   scan->fs_formatter = (FormatterData *) palloc0 (sizeof(FormatterData));
+   initStringInfo(&scan->fs_formatter->fmt_databuf);
+   scan->fs_formatter->fmt_perrow_ctx = scan->fs_pstate->rowcontext;
+   scan->fs_formatter->fmt_user_ctx = NULL;
 
/* Set up callback to identify error line number */
scan->errcontext.callback = external_scan_error_callback;
@@ -391,6 +409,15 @@ external_endscan(FileScanDesc scan)
}
 
/*
+* free formatter name
+*/
+   if (scan->fs_formatter_name)
+   {
+   pfree(scan->fs_formatter_name);
+   scan->fs_formatter_

incubator-hawq git commit: HAWQ-1564. Add Pluggable Storage Dependent Information

2017-12-07 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 9578ab04c -> 816782bd8


HAWQ-1564. Add Pluggable Storage Dependent Information

The information added mainly covers the external URI, block locations, file splits, and the formatter action.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/816782bd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/816782bd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/816782bd

Branch: refs/heads/master
Commit: 816782bd84c5ac001a86afa907a81794530e3433
Parents: 9578ab0
Author: Chiyang Wan 
Authored: Tue Dec 5 09:57:02 2017 +0800
Committer: Chiyang Wan 
Committed: Wed Dec 6 12:35:34 2017 +0800

--
 src/backend/access/external/fileam.c | 26 +
 src/backend/access/external/url.c| 32 +--
 src/backend/nodes/copyfuncs.c|  1 +
 src/backend/nodes/outfast.c  |  1 +
 src/backend/nodes/outfuncs.c |  1 +
 src/backend/nodes/readfast.c |  1 +
 src/include/access/extprotocol.h | 22 +++--
 src/include/access/filesplit.h   |  1 +
 src/include/access/formatter.h   |  7 ++-
 src/include/access/relscan.h | 15 +++
 src/include/access/url.h |  2 +-
 11 files changed, 87 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/816782bd/src/backend/access/external/fileam.c
--
diff --git a/src/backend/access/external/fileam.c 
b/src/backend/access/external/fileam.c
index 099dae5..6a59b95 100644
--- a/src/backend/access/external/fileam.c
+++ b/src/backend/access/external/fileam.c
@@ -79,7 +79,7 @@
 #include "cdb/cdbutil.h"
 #include "cdb/cdbvars.h"
 
-static HeapTuple externalgettup(FileScanDesc scan, ScanDirection dir, 
ExternalSelectDesc desc);
+static HeapTuple externalgettup(FileScanDesc scan, ScanDirection dir, 
ExternalSelectDesc desc, ScanState *ss);
 static void InitParseState(CopyState pstate, Relation relation,
   Datum* values, bool* nulls, 
bool writable,
   List *fmtOpts, char fmtType,
@@ -97,7 +97,7 @@ static void 
FunctionCallPrepareFormatter(FunctionCallInfoData*fcinfo,
 
 static void open_external_readable_source(FileScanDesc scan);
 static void open_external_writable_source(ExternalInsertDesc extInsertDesc);
-static int external_getdata(URL_FILE *extfile, CopyState pstate, int 
maxread, ExternalSelectDesc desc);
+static int external_getdata(URL_FILE *extfile, CopyState pstate, int 
maxread, ExternalSelectDesc desc, ScanState *ss);
 static void external_senddata(URL_FILE *extfile, CopyState pstate);
 static void external_scan_error_callback(void *arg);
 void readHeaderLine(CopyState pstate);
@@ -487,6 +487,7 @@ HeapTuple
 external_getnext(FileScanDesc scan, ScanDirection direction, 
ExternalSelectDesc desc)
 {
HeapTuple   tuple;
+   ScanState *ss = NULL; /* a temporary dummy for the following steps */
 
if (scan->fs_noop)
return NULL;
@@ -508,7 +509,7 @@ external_getnext(FileScanDesc scan, ScanDirection 
direction, ExternalSelectDesc
/* Note: no locking manipulations needed */
FILEDEBUG_1;
 
-   tuple = externalgettup(scan, direction, desc);
+   tuple = externalgettup(scan, direction, desc, ss);
 
 
if (tuple == NULL)
@@ -991,7 +992,7 @@ static DataLineStatus parse_next_line(FileScanDesc scan)
 }
 
 static HeapTuple
-externalgettup_defined(FileScanDesc scan, ExternalSelectDesc desc)
+externalgettup_defined(FileScanDesc scan, ExternalSelectDesc desc, ScanState 
*ss)
 {
HeapTuple   tuple = NULL;
CopyState   pstate = scan->fs_pstate;
@@ -1003,7 +1004,7 @@ externalgettup_defined(FileScanDesc scan, 
ExternalSelectDesc desc)
/* need to fill our buffer with data? */
if (pstate->raw_buf_done)
{
-   pstate->bytesread = 
external_getdata((URL_FILE*)scan->fs_file, pstate, RAW_BUF_SIZE, desc);
+   pstate->bytesread = 
external_getdata((URL_FILE*)scan->fs_file, pstate, RAW_BUF_SIZE, desc, ss);
pstate->begloc = pstate->raw_buf;
pstate->raw_buf_done = (pstate->bytesread==0);
pstate->raw_buf_index = 0;
@@ -1094,7 +1095,7 @@ externalgettup_defined(FileScanDesc scan, 
ExternalSelectDesc desc)
 }
 
 static HeapTuple
-externalgettup_custom(FileScanDesc scan, ExternalSelectDesc desc)
+externalgettup_custom(FileScanDesc scan, ExternalSelectDesc desc, ScanState 
*ss

incubator-hawq git commit: HAWQ-1555. Add access interfaces for protocol and format in pluggable storage framework

2017-12-01 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 5e152951d -> 96c13f7ba


HAWQ-1555. Add access interfaces for protocol and format in pluggable storage 
framework
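
As a sketch of what such an access interface can look like (these names are illustrative only; the real entry points are in the plugstorage.c added below), a per-protocol/per-format function table might be:

/* Illustrative function-pointer table for a pluggable protocol/format;
 * not the HAWQ API. */
typedef struct ExampleStorageAccess
{
	void *(*beginscan)(const char *uri, const char *format_opts);
	int   (*getnext)(void *scan_state, void *tuple_out);     /* returns 0 at EOF */
	void  (*endscan)(void *scan_state);
	void *(*begininsert)(const char *uri, const char *format_opts);
	int   (*insert)(void *insert_state, const void *tuple);
	void  (*endinsert)(void *insert_state);
} ExampleStorageAccess;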


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/96c13f7b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/96c13f7b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/96c13f7b

Branch: refs/heads/master
Commit: 96c13f7bacf46e088114dfbf38ad8006e0dfc92f
Parents: 5e15295
Author: Chiyang Wan 
Authored: Tue Nov 28 09:16:48 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Dec 1 16:52:13 2017 +0800

--
 src/backend/access/external/Makefile  |   3 +-
 src/backend/access/external/fileam.c  |  37 ++
 src/backend/access/external/plugstorage.c | 553 +
 src/include/access/fileam.h   |   2 +
 src/include/access/plugstorage.h  | 222 ++
 src/include/access/plugstorage_utils.h|  59 +++
 src/include/nodes/nodes.h |   3 +
 7 files changed, 878 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/96c13f7b/src/backend/access/external/Makefile
--
diff --git a/src/backend/access/external/Makefile 
b/src/backend/access/external/Makefile
index bc043dc..cc52f2f 100644
--- a/src/backend/access/external/Makefile
+++ b/src/backend/access/external/Makefile
@@ -27,7 +27,8 @@ top_builddir = ../../../..
 include $(top_builddir)/src/Makefile.global
 
 OBJS = fileam.o url.o libchurl.o hd_work_mgr.o pxfuriparser.o pxfheaders.o \
-pxfmasterapi.o ha_config.o pxfcomutils.o pxfutils.o pxffilters.o pxfanalyze.o
+pxfmasterapi.o ha_config.o pxfcomutils.o pxfutils.o pxffilters.o pxfanalyze.o \
+plugstorage.o
 
 include $(top_srcdir)/src/backend/common.mk
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/96c13f7b/src/backend/access/external/fileam.c
--
diff --git a/src/backend/access/external/fileam.c 
b/src/backend/access/external/fileam.c
index f77b29e..099dae5 100644
--- a/src/backend/access/external/fileam.c
+++ b/src/backend/access/external/fileam.c
@@ -2310,6 +2310,43 @@ strtokx2(const char *s,
return start;
 }
 
+char *getExtTblFormatterTypeInFmtOptsStr(char *fmtStr)
+{
+   const char  *whitespace = " \t\n\r";
+   const char  *quote = "'";
+   int encoding = GetDatabaseEncoding();
+
+   char *key = strtokx2(fmtStr, whitespace, NULL, NULL,
+0, false, true, encoding);
+   char *val = strtokx2(NULL, whitespace, NULL, quote,
+0, false, true, encoding);
+
+   while (key && val)
+   {
+   if (pg_strncasecmp(key, "formatter", strlen("formatter")) == 0)
+   {
+   return pstrdup(val);
+   }
+
+   key = strtokx2(NULL, whitespace, NULL, NULL,
+  0, false, false, encoding);
+   val = strtokx2(NULL, whitespace, NULL, quote,
+  0, false, true, encoding);
+   }
+
+   return NULL;
+}
+
+char *getExtTblFormatterTypeInFmtOptsList(List *fmtOpts)
+{
+   /* the formatter option is always at the beginning of fmtOpts */
+   char *formatterStr = pstrdup((char *) strVal(linitial(fmtOpts)));
+   char *formatterName = getExtTblFormatterTypeInFmtOptsStr(formatterStr);
+   pfree(formatterStr);
+
+   return formatterName;
+}
+
 /*
  * parseFormatString
  *
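
A minimal caller sketch for the helper added above (illustrative only; it assumes the usual backend headers such as postgres.h and nodes/pg_list.h, and the formatter name 'hdfs_csv_in' is made up):

/* Build a format-options list whose first element carries the formatter
 * clause, then extract the formatter name from it. */
static void
example_lookup_formatter(void)
{
	List *fmtOpts = list_make1(makeString(pstrdup("formatter 'hdfs_csv_in' delimiter ','")));
	char *fmtName = getExtTblFormatterTypeInFmtOptsList(fmtOpts);

	if (fmtName != NULL)
		elog(LOG, "external table formatter: %s", fmtName);
	else
		elog(LOG, "no formatter found in format options");
}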

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/96c13f7b/src/backend/access/external/plugstorage.c
--
diff --git a/src/backend/access/external/plugstorage.c 
b/src/backend/access/external/plugstorage.c
new file mode 100644
index 000..21dd51e
--- /dev/null
+++ b/src/backend/access/external/plugstorage.c
@@ -0,0 +1,553 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissi

incubator-hawq-site git commit: Correct download page to reflect Apache HAWQ 2.2.0.0-incubating release

2017-07-11 Thread huor
Repository: incubator-hawq-site
Updated Branches:
  refs/heads/asf-site a6ab6fdbf -> 5832adbbb


Correct download page to reflect Apache HAWQ 2.2.0.0-incubating release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/commit/5832adbb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/tree/5832adbb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/diff/5832adbb

Branch: refs/heads/asf-site
Commit: 5832adbbb02e1b85facb02bb39cab97f518d711a
Parents: a6ab6fd
Author: Ruilong Huo 
Authored: Tue Jul 11 23:17:36 2017 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 11 23:17:36 2017 +0800

--
 index.html | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/blob/5832adbb/index.html
--
diff --git a/index.html b/index.html
index c60b1a6..4a61fa3 100644
--- a/index.html
+++ b/index.html
@@ -330,6 +330,8 @@
   http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256";>SHA-256
 |
   http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5";>MD5
 
+  
+  
 http://apache.org/dyn/closer.cgi/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz";
 class="type">Source
 
   Version 2.2.0.0 |



incubator-hawq-site git commit: Update download page to reflect Apache HAWQ 2.2.0.0-incubating release

2017-07-11 Thread huor
Repository: incubator-hawq-site
Updated Branches:
  refs/heads/asf-site 4f92c2471 -> a6ab6fdbf


Update download page to reflect Apache HAWQ 2.2.0.0-incubating release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/commit/a6ab6fdb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/tree/a6ab6fdb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/diff/a6ab6fdb

Branch: refs/heads/asf-site
Commit: a6ab6fdbfc2e27d245e2f4e0a607c5fa08e3c363
Parents: 4f92c24
Author: Ruilong Huo 
Authored: Tue Jul 11 23:10:34 2017 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 11 23:10:34 2017 +0800

--
 index.html | 39 ++-
 1 file changed, 38 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq-site/blob/a6ab6fdb/index.html
--
diff --git a/index.html b/index.html
index 667b7af..c60b1a6 100644
--- a/index.html
+++ b/index.html
@@ -86,7 +86,7 @@
   
 
   
-DOWNLOAD2.1.0.0 (Feb. 28th, 2017)
+DOWNLOAD2.2.0.0 (July 12, 2017)
   
 
 
@@ -310,6 +310,43 @@
   
 
   
+Download HAWQ 2.2.0.0-incubating
+  
+
+  
+
+
+
+  
+
+  
+
+
+  
+http://apache.org/dyn/closer.cgi/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz";
 class="type">Binary
+
+  Version 2.2.0.0 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc";>PGP
 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256";>SHA-256
 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5";>MD5
+
+http://apache.org/dyn/closer.cgi/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz";
 class="type">Source
+
+  Version 2.2.0.0 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc";>PGP
 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.sha256";>SHA-256
 |
+  http://www-eu.apache.org/dist/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.md5";>MD5
+
+  
+
+
+  
+
+
+
+  
+
+  
 Download HAWQ 2.1.0.0-incubating
   
 



svn commit: r20397 - /release/incubator/hawq/2.2.0.0-incubating/

2017-07-10 Thread huor
Author: huor
Date: Tue Jul 11 04:58:25 2017
New Revision: 20397

Log:
Release Apache HAWQ (incubating) 2.2.0.0

Added:
release/incubator/hawq/2.2.0.0-incubating/

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
   (with props)

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz
   (with props)

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.md5

release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.sha256

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
==
--- 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 Tue Jul 11 04:58:25 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllTIXgACgkQj+zaiBuL
+aHIqWw/8Cj+agw4ezoN1T0qOMKRRHrf1jVHD1nxVWLFtj01aioV/zWrllvPR6RsM
+T1f4bIGd5/aCkZ/1o2tPxchjxmUr9IroN68nHItZ/Kr4RKG9rIPa+YzREegacT7V
+poUeV0Rsyvdl15Sck51IudpMzLiKIyLx4kuyR4nr0gE4ywi08HYMCuGmwgRcOkjE
+bzsggolIOPosZp9zswxnPDbpmjE6DzvExyYdkIMmzzAkChWx/tb7jSp+6F2DJq/2
+w0FGTVga1LstyVyaeUhMHTrR0eqYiijGZlu7WJW8iX0t/2+osMuyGAooUblU7hIl
+rPDm4pf+7RjR19p4X7OMU9YYhT6xhiaGvEawtBR7CY1LGmsy77w8sXJHZAlgaAoK
+X9n5s0pibGHkDHon6ojEYSuZ0aFHKF/ahMjRi1av8++gyZr4UAEVjib5UQre14NA
+uk2gV3pFXgjldYGyV4gFXmBBk1wGN6pDyPaTujtLJ0GZSwy8A493C+C6vkLP188+
+biIFC4wqYHHi72r6ZTEfWdm0VqQyekF9AvupsOBKdjhBVmo2i/Smh3zJmWgrRCER
+yt1iAqXxy0PqQjLypV+R2CBbiSE39PqfCiTvUBrC3xgq2gShprBZeARI6ahNR3Se
+F2HieJ1a5hxc8SQyLkDtr+kxqtQQrZaO7l4/pdw9R1dp133oiCI=
+=vAKp
+-END PGP SIGNATURE-

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
==
--- 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 (added)
+++ 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 Tue Jul 11 04:58:25 2017
@@ -0,0 +1 @@
+MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 
8624f6cab77657ce5af8ac31ce5b6716

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
==
--- 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 (added)
+++ 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 Tue Jul 11 04:58:25 2017
@@ -0,0 +1 @@
+0b104c5ca867a08c9465bd343bed731ab784c1b3f2efdab8a1ba50189c990fba  
apache-hawq-rpm-2.2.0.0-incubating.tar.gz

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
==
--- 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
release/incubator/hawq/2.2.0.0-incubating/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 Tue Jul 11 04:58:25 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllOBQoACgkQj+zaiBuL
+aHIiMRAAn0Z6aOZehVImpw0xgHf235uYhM7+vaUrQ+Y0JiaAgwafWxJ8VJITb74+
+1slnZkyrkbW95JNSA5TnzApr2ygKBgMtVdR+WiEEVhJbwdEH5LiCa3jUw55u3fmg
+1taQJZpFXqvLoyljUBFLIVrikzFP7uZ/oD1vLVxKlfPfEGrQ8hWL4iuE6zBjhP32
+7Tx93re+D1tbCjQG2hRLglWY8EM9UtUzE7Kun007yIrGDkwhIy9jBRykCNjKzsxs
+qGTyHdYqNWXiH/ZqHYFc3JY+GDeeaPl7Lr/QLWUPpCb7c8mlXzwAAM8qty+3WVfY
+MgDPwPX5cgEG7mYhriHfFy3Si3hJkEw0TzIL5nPDc2nn/pUoHCHrVZqOp0aGHVkk

[incubator-hawq] Git Push Summary

2017-07-10 Thread huor
Repository: incubator-hawq
Updated Tags:  refs/tags/rel/v2.2.0.0-incubating [created] cfbae94ad


svn commit: r20222 - in /dev/incubator/hawq/2.2.0.0-incubating.RC3: apache-hawq-rpm-2.2.0.0-incubating.tar.gz apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md

2017-06-27 Thread huor
Author: huor
Date: Wed Jun 28 03:37:18 2017
New Revision: 20222

Log:
Update 2.2.0.0-incubating RC3 candidate release artifacts to include HAWQ core 
and PXF only

Modified:

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256

Modified: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
==
Binary files - no diff available.

Modified: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 (original)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 Wed Jun 28 03:37:18 2017
@@ -1,16 +1,16 @@
 -BEGIN PGP SIGNATURE-
 
-iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllOBLsACgkQj+zaiBuL
-aHI+AxAAlwLG18/MByxycIU53jOg/6LpzmjxsADLQbf595rMtFYHj99Nc7sPJK+T
-vN+6iOhk1D6Cp3y2Z6TVaWkSIPPQAr98GcudBqXYRUBNDalokP4OIK2uEWEdua3D
-KRwd7lCTCOp9sVeBIwhx+RSXR7LQ2/3wG44Mc/PWdCG0z+oIVo4y/a2mLkhHl5j0
-SuHUR5Ge6VLJK6u1dlybbihV7e5jmhMPacVC/Iuzn6UOe/x5KnrJ9eucgH/R7FDW
-bcgxL5Ti35/IHJ0VrQT3FLclXg7xauXqG+/z1UIH+OX3wE9wyBnjZoUYi+IrKGRR
-5FW9iFClIQhfRntJQy9EBVHtrzbJun5OqZpALfRc457cY5Tp9KpbLLh9rSRcXKIt
-CqOPiBoxBIJ3SQQRUnDTKO75UquyakylXlPaUYGmkWv19WaTZV8kCPsY0/7Lf/nP
-2TLNZe6DhurrZzEV7cHXVJqt7aIaWZipeG5r+ufbcjznMgXAk6qRv5MQm4LsGi6H
-bqoT6eZUSfxuAtaV539c/zjKXkRzIAamv1g1tD8TrtjUpf8aXErU+xY2FulRcqhA
-PA3qNiGpdNWpLzFA4zeUcSThg9n3LjoZMK35YrYYZWeUgepF5BM67wc260R+T25b
-UrqR74y/BLcyLKncW60aj1UBUBCMdsdItaKDYfBIB9aOUerbOiw=
-=iIXT
+iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllTIXgACgkQj+zaiBuL
+aHIqWw/8Cj+agw4ezoN1T0qOMKRRHrf1jVHD1nxVWLFtj01aioV/zWrllvPR6RsM
+T1f4bIGd5/aCkZ/1o2tPxchjxmUr9IroN68nHItZ/Kr4RKG9rIPa+YzREegacT7V
+poUeV0Rsyvdl15Sck51IudpMzLiKIyLx4kuyR4nr0gE4ywi08HYMCuGmwgRcOkjE
+bzsggolIOPosZp9zswxnPDbpmjE6DzvExyYdkIMmzzAkChWx/tb7jSp+6F2DJq/2
+w0FGTVga1LstyVyaeUhMHTrR0eqYiijGZlu7WJW8iX0t/2+osMuyGAooUblU7hIl
+rPDm4pf+7RjR19p4X7OMU9YYhT6xhiaGvEawtBR7CY1LGmsy77w8sXJHZAlgaAoK
+X9n5s0pibGHkDHon6ojEYSuZ0aFHKF/ahMjRi1av8++gyZr4UAEVjib5UQre14NA
+uk2gV3pFXgjldYGyV4gFXmBBk1wGN6pDyPaTujtLJ0GZSwy8A493C+C6vkLP188+
+biIFC4wqYHHi72r6ZTEfWdm0VqQyekF9AvupsOBKdjhBVmo2i/Smh3zJmWgrRCER
+yt1iAqXxy0PqQjLypV+R2CBbiSE39PqfCiTvUBrC3xgq2gShprBZeARI6ahNR3Se
+F2HieJ1a5hxc8SQyLkDtr+kxqtQQrZaO7l4/pdw9R1dp133oiCI=
+=vAKp
 -END PGP SIGNATURE-

Modified: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 (original)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 Wed Jun 28 03:37:18 2017
@@ -1 +1 @@
-MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 
4556afa7f7170ab5a40fbec2607d00b5
+MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 
8624f6cab77657ce5af8ac31ce5b6716

Modified: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 (original)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 Wed Jun 28 03:37:18 2017
@@ -1 +1 @@
-b7d756a2d8a3178d0a277acb89a204633dafc9839bb02353003ef0ee3470728e  
apache-hawq-rpm-2.2.0.0-incubating.tar.gz
+0b104c5ca867a08c9465bd343bed731ab784c1b3f2efdab8a1ba50189c990fba  
apache-hawq-rpm-2.2.0.0-incubating.tar.gz




svn commit: r20181 - /dev/incubator/hawq/2.2.0.0-incubating.RC3/

2017-06-24 Thread huor
Author: huor
Date: Sat Jun 24 07:07:06 2017
New Revision: 20181

Log:
Add 2.2.0.0-incubating RC3 candidate release artifacts

Added:
dev/incubator/hawq/2.2.0.0-incubating.RC3/

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.sha256

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 Sat Jun 24 07:07:06 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllOBLsACgkQj+zaiBuL
+aHI+AxAAlwLG18/MByxycIU53jOg/6LpzmjxsADLQbf595rMtFYHj99Nc7sPJK+T
+vN+6iOhk1D6Cp3y2Z6TVaWkSIPPQAr98GcudBqXYRUBNDalokP4OIK2uEWEdua3D
+KRwd7lCTCOp9sVeBIwhx+RSXR7LQ2/3wG44Mc/PWdCG0z+oIVo4y/a2mLkhHl5j0
+SuHUR5Ge6VLJK6u1dlybbihV7e5jmhMPacVC/Iuzn6UOe/x5KnrJ9eucgH/R7FDW
+bcgxL5Ti35/IHJ0VrQT3FLclXg7xauXqG+/z1UIH+OX3wE9wyBnjZoUYi+IrKGRR
+5FW9iFClIQhfRntJQy9EBVHtrzbJun5OqZpALfRc457cY5Tp9KpbLLh9rSRcXKIt
+CqOPiBoxBIJ3SQQRUnDTKO75UquyakylXlPaUYGmkWv19WaTZV8kCPsY0/7Lf/nP
+2TLNZe6DhurrZzEV7cHXVJqt7aIaWZipeG5r+ufbcjznMgXAk6qRv5MQm4LsGi6H
+bqoT6eZUSfxuAtaV539c/zjKXkRzIAamv1g1tD8TrtjUpf8aXErU+xY2FulRcqhA
+PA3qNiGpdNWpLzFA4zeUcSThg9n3LjoZMK35YrYYZWeUgepF5BM67wc260R+T25b
+UrqR74y/BLcyLKncW60aj1UBUBCMdsdItaKDYfBIB9aOUerbOiw=
+=iIXT
+-END PGP SIGNATURE-

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 Sat Jun 24 07:07:06 2017
@@ -0,0 +1 @@
+MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 
4556afa7f7170ab5a40fbec2607d00b5

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 Sat Jun 24 07:07:06 2017
@@ -0,0 +1 @@
+b7d756a2d8a3178d0a277acb89a204633dafc9839bb02353003ef0ee3470728e  
apache-hawq-rpm-2.2.0.0-incubating.tar.gz

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC3/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 Sat Jun 24 07:07:06 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCgAdFiEE2+lgW9iO64/ujs2xj+zaiBuLaHIFAllOBQoACgkQj+zaiBuL
+aHIiMRAAn0Z6aOZehVImpw0xgHf235uYhM7+vaUrQ+Y0JiaAgwafWxJ8VJITb74+
+1slnZkyrkbW95JNSA5TnzApr2ygKBgMtVdR+WiEEVhJbwdEH5LiCa3jUw55u3fmg
+1taQJZpFXqvLoyljUBFLIVrikzFP7uZ/oD1vLVxKlfPfEGrQ8hWL4iuE6zBjhP32
+7Tx93re+D1tbCjQG2hRLglWY8EM9UtUzE7Kun007yIrGDkwhIy9jBRykCNjKzsxs
+qGTyHdYqNWXiH/ZqHYFc3JY+GDeeaPl7Lr/QLWUPpCb7c8mlXzwAAM8qty+3WVfY
+MgDPwPX5cgEG7mYhriHfFy3Si3hJkEw0TzIL5nPDc2nn

[incubator-hawq] Git Push Summary

2017-06-23 Thread huor
Repository: incubator-hawq
Updated Tags:  refs/tags/2.2.0.0-incubating-rc3 [created] 7604e4c75


incubator-hawq git commit: HAWQ-1475. Add LICENSE, NOTICE, and DISCLAIMER files for Apache HAWQ binary release c/c++ components

2017-06-22 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/2.2.0.0-incubating fbc03d82e -> c08aa560a


HAWQ-1475. Add LICENSE, NOTICE, and DISCLAIMER files for Apache HAWQ binary 
release c/c++ components


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/c08aa560
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/c08aa560
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/c08aa560

Branch: refs/heads/2.2.0.0-incubating
Commit: c08aa560a65af25f1357fc49d736407aff124d05
Parents: fbc03d8
Author: Ruilong Huo 
Authored: Sat May 27 15:41:35 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Jun 23 13:23:18 2017 +0800

--
 GNUmakefile.in   |   1 +
 dist/Makefile|  31 +++
 dist/hawq/DISCLAIMER |  11 ++
 dist/hawq/LICENSE| 490 ++
 dist/hawq/Makefile   |  35 
 dist/hawq/NOTICE |  35 
 6 files changed, 603 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c08aa560/GNUmakefile.in
--
diff --git a/GNUmakefile.in b/GNUmakefile.in
index 5e97160..e6d01d0 100644
--- a/GNUmakefile.in
+++ b/GNUmakefile.in
@@ -30,6 +30,7 @@ install:
$(MAKE) -C contrib $@
$(MAKE) -C tools $@
$(MAKE) -C ranger-plugin $@
+   $(MAKE) -C dist $@
@echo "HAWQ installation complete."
 
 installdirs uninstall:

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c08aa560/dist/Makefile
--
diff --git a/dist/Makefile b/dist/Makefile
new file mode 100644
index 000..bff1541
--- /dev/null
+++ b/dist/Makefile
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-
+#
+# Makefile for license information of the rpm package
+#
+#-
+
+subdir = dist
+top_builddir = ..
+include $(top_builddir)/src/Makefile.global
+
+all: install
+
+install:
+   ${MAKE} -C hawq $@

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c08aa560/dist/hawq/DISCLAIMER
--
diff --git a/dist/hawq/DISCLAIMER b/dist/hawq/DISCLAIMER
new file mode 100644
index 000..8c2058c
--- /dev/null
+++ b/dist/hawq/DISCLAIMER
@@ -0,0 +1,11 @@
+Apache HAWQ is an effort undergoing incubation at the Apache Software
+Foundation (ASF), sponsored by the Apache Incubator PMC.
+
+Incubation is required of all newly accepted projects until a further
+review indicates that the infrastructure, communications, and decision
+making process have stabilized in a manner consistent with other
+successful ASF projects.
+
+While incubation status is not necessarily a reflection of the
+completeness or stability of the code, it does indicate that the
+project has yet to be fully endorsed by the ASF.

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c08aa560/dist/hawq/LICENSE
--
diff --git a/dist/hawq/LICENSE b/dist/hawq/LICENSE
new file mode 100644
index 000..3da3ea4
--- /dev/null
+++ b/dist/hawq/LICENSE
@@ -0,0 +1,490 @@
+ Apache License
+   Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+  "License" shall mean the terms and conditions for use, reproduction,
+  and distribution as defined by Sections 1 through 9 of this document.
+
+  "Licensor" shall mean the copyright owner or entity authorized by
+  the copyright owner that is granting the License.
+
+  "Legal Entity" shall mean the union of the acting entity and all
+  other entities that control, are controlled by, or are under common
+  control with that entity. For the purposes of th

incubator-hawq git commit: Fix share input scan bug for the writer part

2017-06-21 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 4ef7022e7 -> 339806f3a


Fix share input scan bug for the writer part


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/339806f3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/339806f3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/339806f3

Branch: refs/heads/master
Commit: 339806f3a40cf85686496412984e65ebfb481dbd
Parents: 4ef7022
Author: amyrazz44 
Authored: Mon May 8 17:27:07 2017 +0800
Committer: Ruilong Huo 
Committed: Thu Jun 22 07:32:19 2017 +0800

--
 src/backend/executor/nodeMaterial.c   |  38 +-
 src/backend/executor/nodeShareInputScan.c | 158 -
 src/backend/utils/misc/guc.c  |  12 +-
 src/include/executor/nodeMaterial.h   |   1 +
 src/include/executor/nodeShareInputScan.h |   2 +
 src/include/utils/guc.h   |   2 +
 6 files changed, 203 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/339806f3/src/backend/executor/nodeMaterial.c
--
diff --git a/src/backend/executor/nodeMaterial.c 
b/src/backend/executor/nodeMaterial.c
index f2b82b2..4589351 100644
--- a/src/backend/executor/nodeMaterial.c
+++ b/src/backend/executor/nodeMaterial.c
@@ -41,19 +41,21 @@
 #include "postgres.h"
 
 #include "executor/executor.h"
-#include "executor/nodeMaterial.h"
 #include "executor/instrument.h"/* Instrumentation */
 #include "utils/tuplestorenew.h"
-
+#include "executor/nodeMaterial.h"
 #include "miscadmin.h"
 
 #include "cdb/cdbvars.h"
+#include "postmaster/primary_mirror_mode.h"
 
+
+static int sisc_writer_lock_fd = -1;
 static void ExecMaterialExplainEnd(PlanState *planstate, struct StringInfoData 
*buf);
 static void ExecChildRescan(MaterialState *node, ExprContext *exprCtxt);
 static void DestroyTupleStore(MaterialState *node);
 static void ExecMaterialResetWorkfileState(MaterialState *node);
-
+static void mkLockFileForWriter(int size, int share_id, char * name);
 
 /* 
  * ExecMaterial
@@ -115,6 +117,7 @@ ExecMaterial(MaterialState *node)
 
ts = ntuplestore_create_readerwriter(rwfile_prefix, 
PlanStateOperatorMemKB((PlanState *)node) * 1024, true);
tsa = ntuplestore_create_accessor(ts, true);
+   mkLockFileForWriter(MAXPGPATH, ma->share_id, "writer");
}
else
{
@@ -247,6 +250,8 @@ ExecMaterial(MaterialState *node)
 
node->share_lk_ctxt = 
shareinput_writer_notifyready(ma->share_id, ma->nsharer_xslice,

estate->es_plannedstmt->planGen);
+   if(sisc_writer_lock_fd > 0)
+   close(sisc_writer_lock_fd);
}
}
return NULL;
@@ -759,3 +764,30 @@ ExecEagerFreeMaterial(MaterialState *node)
}
 }
 
+
+/*
+ * mkLockFileForWriter
+ * 
+ * Create a unique lock file for the writer, then use flock() to lock/unlock the lock file.
+ * This ensures the lock file stays locked until the writer process exits.
+ */
+static void mkLockFileForWriter(int size, int share_id, char * name)
+{
+   char *lock_file;
+   int lock;
+
+   lock_file = (char *)palloc0(size);
+   generate_lock_file_name(lock_file, size, share_id, name);
+   elog(DEBUG3, "The lock file for writer in SISC is %s", lock_file);
+   sisc_writer_lock_fd = open(lock_file, O_CREAT, S_IRWXU);
+   if(sisc_writer_lock_fd < 0)
+   {
+   elog(ERROR, "Could not create lock file %s for writer in SISC. 
The error number is %d", lock_file, errno);
+   }
+   lock = flock(sisc_writer_lock_fd, LOCK_EX | LOCK_NB);
+   if(lock == -1)
+   elog(DEBUG3, "Could not lock lock file \"%s\" for writer in SISC. The error number is %d", lock_file, errno);
+   else if(lock == 0)
+   elog(LOG, "Successfully locked lock file  \"%s\" for writer in 
SISC.", lock_file);
+   pfree(lock_file);
+}
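
For illustration only (this is not the HAWQ reader code, and the function name is made up): while a process holds the exclusive flock() taken above, any other process can check whether that lock is still held by attempting a non-blocking lock on the same file.

#include <errno.h>
#include <fcntl.h>
#include <stdbool.h>
#include <sys/file.h>
#include <unistd.h>

/* Return true if some other process still holds an exclusive flock()
 * on lock_file. */
static bool
example_lock_still_held(const char *lock_file)
{
	int fd = open(lock_file, O_RDONLY);
	if (fd < 0)
		return false;                       /* no lock file at all */

	bool held = (flock(fd, LOCK_SH | LOCK_NB) == -1 && errno == EWOULDBLOCK);
	if (!held)
		flock(fd, LOCK_UN);                 /* we got the lock; release it */
	close(fd);
	return held;
}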

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/339806f3/src/backend/executor/nodeShareInputScan.c
--
diff --git a/src/backend/executor/nodeShareInputScan.c 
b/src/backend/executor/nodeShareInputScan.c
index 049943b..88c695d 100644
--- a/src/backend/executor/nodeShareInputScan.c
+++ b/src/backend/executor/nodeShareInputScan.c
@@ -43,7 +43,6 @@
 #include "cdb/cdbvars.h"
 #include "executor/e

incubator-hawq git commit: HAWQ-1487. Fix hung process due to deadlock when it tries to process an interrupt during error handling

2017-06-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master b53484511 -> 4ef7022e7


HAWQ-1487. Fix hung process due to deadlock when it tries to process an interrupt during error handling
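
The core of the fix, condensed from the diff below: backtrace collection is wrapped so that immediate interrupts cannot fire while the stack is being unwound, and any interrupt that arrived in the meantime is honored afterwards (ImmediateInterruptOK, CHECK_FOR_INTERRUPTS() and backtrace() are the existing backend/libc facilities; the wrapper name here is illustrative):

static uint32
guarded_backtrace(void **stackAddresses, uint32 maxStackDepth)
{
	bool    save_ImmediateInterruptOK = ImmediateInterruptOK;
	uint32  depth;

	ImmediateInterruptOK = false;           /* no interrupt processing while unwinding */
	depth = backtrace(stackAddresses, (int) maxStackDepth);
	ImmediateInterruptOK = save_ImmediateInterruptOK;

	if (save_ImmediateInterruptOK)
		CHECK_FOR_INTERRUPTS();             /* process anything that arrived meanwhile */

	return depth;
}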


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/4ef7022e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/4ef7022e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/4ef7022e

Branch: refs/heads/master
Commit: 4ef7022e70e53e19e4bc3d2f768324f292304efb
Parents: b534845
Author: Ruilong Huo 
Authored: Tue Jun 13 18:11:01 2017 +0800
Committer: Ruilong Huo 
Committed: Wed Jun 21 09:45:18 2017 +0800

--
 src/backend/utils/error/elog.c | 56 -
 1 file changed, 55 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4ef7022e/src/backend/utils/error/elog.c
--
diff --git a/src/backend/utils/error/elog.c b/src/backend/utils/error/elog.c
index 57ce2ce..a54177b 100644
--- a/src/backend/utils/error/elog.c
+++ b/src/backend/utils/error/elog.c
@@ -489,7 +489,23 @@ errstart(int elevel, const char *filename, int lineno,
 edata->saved_errno = errno;
 
 #ifndef WIN32
+bool save_ImmediateInterruptOK = ImmediateInterruptOK;
+/*
+ * We may be called while ImmediateInterruptOK is true; turn it off
+ * while messing with elog processing.
+ */
+ImmediateInterruptOK = false;
+
 edata->stacktracesize = backtrace(edata->stacktracearray, 30);
+
+/*
+ * Restore ImmediateInterruptOK, and check for interrupts if needed.
+ */
+ImmediateInterruptOK = save_ImmediateInterruptOK;
+if (save_ImmediateInterruptOK)
+{
+CHECK_FOR_INTERRUPTS();
+}
 #else
 edata->stacktracesize = 0;
 #endif
@@ -4371,7 +4387,23 @@ uint32 gp_backtrace(void **stackAddresses, uint32 
maxStackDepth)
}
else
{
+   bool save_ImmediateInterruptOK = ImmediateInterruptOK;
+   /*
+* We may be called while ImmediateInterruptOK is true; turn it 
off
+* while messing with elog processing.
+*/
+   ImmediateInterruptOK = false;
+
depth  = backtrace(stackAddresses, maxStackDepth);
+
+   /*
+* Restore ImmediateInterruptOK, and check for interrupts if 
needed.
+*/
+   ImmediateInterruptOK = save_ImmediateInterruptOK;
+   if (save_ImmediateInterruptOK)
+   {
+   CHECK_FOR_INTERRUPTS();
+   }
}
 
Assert(depth > 0);
@@ -4379,7 +4411,29 @@ uint32 gp_backtrace(void **stackAddresses, uint32 
maxStackDepth)
return depth;
 
 #else
-   return backtrace(stackAddresses, maxStackDepth);
+   bool save_ImmediateInterruptOK = ImmediateInterruptOK;
+   /*
+* We may be called while ImmediateInterruptOK is true; turn it off
+* while messing with elog processing.
+*/
+   ImmediateInterruptOK = false;
+
+   uint32 depth = 0;
+
+   depth = backtrace(stackAddresses, maxStackDepth);
+
+   Assert (depth > 0);
+
+   /*
+* Restore ImmediateInterruptOK, and check for interrupts if needed.
+*/
+   ImmediateInterruptOK = save_ImmediateInterruptOK;
+   if (save_ImmediateInterruptOK)
+   {
+   CHECK_FOR_INTERRUPTS();
+   }
+
+   return depth;
 #endif
 }
 



incubator-hawq git commit: HAWQ-1475. Add LICENSE, NOTICE, and DISCLAIMER files for Apache HAWQ binary release c/c++ components

2017-06-06 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 721f90ff1 -> 099557973


HAWQ-1475. Add LICENSE, NOTICE, and DISCLAIMER files for Apache HAWQ binary 
release c/c++ components


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/09955797
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/09955797
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/09955797

Branch: refs/heads/master
Commit: 0995579735f101b0f46b7e1d97465ec7c58de81d
Parents: 721f90f
Author: Ruilong Huo 
Authored: Sat May 27 15:41:35 2017 +0800
Committer: Ruilong Huo 
Committed: Wed Jun 7 13:28:31 2017 +0800

--
 GNUmakefile.in   |   1 +
 dist/Makefile|  31 +++
 dist/hawq/DISCLAIMER |  11 ++
 dist/hawq/LICENSE| 490 ++
 dist/hawq/Makefile   |  35 
 dist/hawq/NOTICE |  35 
 6 files changed, 603 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/09955797/GNUmakefile.in
--
diff --git a/GNUmakefile.in b/GNUmakefile.in
index 5e97160..e6d01d0 100644
--- a/GNUmakefile.in
+++ b/GNUmakefile.in
@@ -30,6 +30,7 @@ install:
$(MAKE) -C contrib $@
$(MAKE) -C tools $@
$(MAKE) -C ranger-plugin $@
+   $(MAKE) -C dist $@
@echo "HAWQ installation complete."
 
 installdirs uninstall:

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/09955797/dist/Makefile
--
diff --git a/dist/Makefile b/dist/Makefile
new file mode 100644
index 000..bff1541
--- /dev/null
+++ b/dist/Makefile
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-
+#
+# Makefile for license information of the rpm package
+#
+#-
+
+subdir = dist
+top_builddir = ..
+include $(top_builddir)/src/Makefile.global
+
+all: install
+
+install:
+   ${MAKE} -C hawq $@

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/09955797/dist/hawq/DISCLAIMER
--
diff --git a/dist/hawq/DISCLAIMER b/dist/hawq/DISCLAIMER
new file mode 100644
index 000..8c2058c
--- /dev/null
+++ b/dist/hawq/DISCLAIMER
@@ -0,0 +1,11 @@
+Apache HAWQ is an effort undergoing incubation at the Apache Software
+Foundation (ASF), sponsored by the Apache Incubator PMC.
+
+Incubation is required of all newly accepted projects until a further
+review indicates that the infrastructure, communications, and decision
+making process have stabilized in a manner consistent with other
+successful ASF projects.
+
+While incubation status is not necessarily a reflection of the
+completeness or stability of the code, it does indicate that the
+project has yet to be fully endorsed by the ASF.

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/09955797/dist/hawq/LICENSE
--
diff --git a/dist/hawq/LICENSE b/dist/hawq/LICENSE
new file mode 100644
index 000..3da3ea4
--- /dev/null
+++ b/dist/hawq/LICENSE
@@ -0,0 +1,490 @@
+ Apache License
+   Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+  "License" shall mean the terms and conditions for use, reproduction,
+  and distribution as defined by Sections 1 through 9 of this document.
+
+  "Licensor" shall mean the copyright owner or entity authorized by
+  the copyright owner that is granting the License.
+
+  "Legal Entity" shall mean the union of the acting entity and all
+  other entities that control, are controlled by, or are under common
+  control with that entity. For the purposes of this definition,
+  "co

incubator-hawq git commit: HAWQ-1411. Fix inconsistent json file for catalog of hawq 2.1

2017-05-07 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master d0befa185 -> 6616ba71b


HAWQ-1411. Fix inconsistent json file for catalog of hawq 2.1


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/6616ba71
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/6616ba71
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/6616ba71

Branch: refs/heads/master
Commit: 6616ba71b97f072ed367e657049878202699a8dd
Parents: d0befa1
Author: Ruilong Huo 
Authored: Mon May 8 12:20:27 2017 +0800
Committer: Ruilong Huo 
Committed: Mon May 8 12:20:27 2017 +0800

--
 tools/bin/gppylib/data/2.1.json | 16 
 1 file changed, 8 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6616ba71/tools/bin/gppylib/data/2.1.json
--
diff --git a/tools/bin/gppylib/data/2.1.json b/tools/bin/gppylib/data/2.1.json
index 019c378..293e649 100644
--- a/tools/bin/gppylib/data/2.1.json
+++ b/tools/bin/gppylib/data/2.1.json
@@ -1,5 +1,5 @@
 {
-   "__comment" : "Generated by tidycat.pl version 34 on Sat Jan  2 21:08:04 
2016 CATALOG_VERSION_NO=201507221",
+   "__comment" : "Generated by tidycat.pl version 34 on Mon Mar 27 14:22:31 
2017 CATALOG_VERSION_NO=201507221",
"__info" : {
   "CATALOG_VERSION_NO" : "201507221"
},
@@ -8252,9 +8252,9 @@
  "creationtime" : "timestamptz",
  "memorylimit" : "text",
  "nvseglowerlimit" : "int4",
- "nvseglowerlimitperseg" : "int4",
+ "nvseglowerlimitperseg" : "float4",
  "nvsegupperlimit" : "int4",
- "nvsegupperlimitperseg" : "int4",
+ "nvsegupperlimitperseg" : "float4",
  "oid" : "Oid",
  "parentoid" : "Oid",
  "resovercommit" : "float4",
@@ -8327,15 +8327,15 @@
  },
  {
 "colname" : "nvsegupperlimitperseg",
-"ctype" : "int4",
+"ctype" : "float4",
 "postcomment" : "-- vsegment size upper limit per segment",
-"sqltype" : "integer"
+"sqltype" : "real"
  },
  {
 "colname" : "nvseglowerlimitperseg",
-"ctype" : "int4",
+"ctype" : "float4",
 "postcomment" : "-- vsegment size lower limit per segment",
-"sqltype" : "integer"
+"sqltype" : "real"
  },
  {
 "colname" : "creationtime",
@@ -8388,7 +8388,7 @@
  }
   ],
   "relid_comment_tag" : "/* relation id: 6026 - pg_resqueue */\n",
-  "tabdef_text" : "\n   CREATE TABLE pg_resqueue\n   with 
(camelcase=ResQueue, shared=true, relid=6026, reltype_oid=9830, toast_oid=9820, 
toast_index=9821, toast_reltype=9822)\n   (\n   name name, -- name of resource 
queue\n   parentoid oid, -- oid of resource queue\n   activestats integer, -- 
active statement count limit\n   memorylimit text, -- memory limit in cluster\n 
  corelimit text, -- core limit in cluster\n   resovercommit real, -- resource 
upper limit in cluster\n   allocpolicy text, -- query resource allocation 
policy\n   vsegresourcequota text, -- vsegment resource quota\n   
nvsegupperlimit integer, -- vsegment size upper limit\n   nvseglowerlimit 
integer, -- vsegment size lower limit\n   nvsegupperlimitperseg integer, -- 
vsegment size upper limit per segment\n   nvseglowerlimitperseg integer, -- 
vsegment size lower limit per segment\n   creationtime timestamp with time 
zone, -- when the queue is created\n   updatetime timestamp with time zone, -- 
when the queue is updated ( create or alter )\n   status text, -- the status of resource queue\n  
 )",
+  "tabdef_text" : "\n   CREATE TABLE pg_resqueue\n   with 
(camelcase=ResQueue, shared=true, relid=6026, reltype_oid=9830, toast_oid=9820, 
toast_index=9821, toast_reltype=9822)\n   (\n   rsqname name, -- name of 
resource queue\n   parentoid oid, -- oid of resource queue\n   activestats 
integer, -- active statement count limit\n   memorylimit text, -- memory limit 
in cluster\n   corelimit text, -- core limit in cluster\n   resovercommit real, 
-- resource upper limit in cluster\n   allocpolicy text, -- query resource 
allocation policy\n   vsegresourcequota text, -- vsegment resource quota\n   
nvsegupperlimit integer, -- vsegment size upper limit\n   nvseglowerlimit 
integer, -- vsegment size lower limit\n   nvsegupperlimitperseg real, -- 
vsegment size upper limit per segment\n   nvseglowerlimitperseg real, -- 
vsegment size lower limit per segment\n   creationtime timestamp with time 
zone, -- when the queue is created\n   updatetime timestamp with time zone, -- 
when the queue is updated ( create or alter )\n   status text, -- the status of resource queue\n   
)",
   "t

incubator-hawq git commit: HAWQ-1412. Fix inconsistent json file for catalog of hawq 2.0

2017-05-07 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 50bf9a3e9 -> d0befa185


HAWQ-1412. Fix inconsistent json file for catalog of hawq 2.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/d0befa18
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/d0befa18
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/d0befa18

Branch: refs/heads/master
Commit: d0befa1853fab51ca14bbcc76c917287dde1d818
Parents: 50bf9a3
Author: Ruilong Huo 
Authored: Mon May 8 12:18:45 2017 +0800
Committer: Ruilong Huo 
Committed: Mon May 8 12:18:45 2017 +0800

--
 tools/bin/gppylib/data/2.0.json | 16 
 1 file changed, 8 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d0befa18/tools/bin/gppylib/data/2.0.json
--
diff --git a/tools/bin/gppylib/data/2.0.json b/tools/bin/gppylib/data/2.0.json
index 019c378..3b05181 100644
--- a/tools/bin/gppylib/data/2.0.json
+++ b/tools/bin/gppylib/data/2.0.json
@@ -1,5 +1,5 @@
 {
-   "__comment" : "Generated by tidycat.pl version 34 on Sat Jan  2 21:08:04 
2016 CATALOG_VERSION_NO=201507221",
+   "__comment" : "Generated by tidycat.pl version 34 on Mon Mar 27 14:44:22 
2017 CATALOG_VERSION_NO=201507221",
"__info" : {
   "CATALOG_VERSION_NO" : "201507221"
},
@@ -8252,9 +8252,9 @@
  "creationtime" : "timestamptz",
  "memorylimit" : "text",
  "nvseglowerlimit" : "int4",
- "nvseglowerlimitperseg" : "int4",
+ "nvseglowerlimitperseg" : "float4",
  "nvsegupperlimit" : "int4",
- "nvsegupperlimitperseg" : "int4",
+ "nvsegupperlimitperseg" : "float4",
  "oid" : "Oid",
  "parentoid" : "Oid",
  "resovercommit" : "float4",
@@ -8327,15 +8327,15 @@
  },
  {
 "colname" : "nvsegupperlimitperseg",
-"ctype" : "int4",
+"ctype" : "float4",
 "postcomment" : "-- vsegment size upper limit per segment",
-"sqltype" : "integer"
+"sqltype" : "real"
  },
  {
 "colname" : "nvseglowerlimitperseg",
-"ctype" : "int4",
+"ctype" : "float4",
 "postcomment" : "-- vsegment size lower limit per segment",
-"sqltype" : "integer"
+"sqltype" : "real"
  },
  {
 "colname" : "creationtime",
@@ -8388,7 +8388,7 @@
  }
   ],
   "relid_comment_tag" : "/* relation id: 6026 - pg_resqueue */\n",
-  "tabdef_text" : "\n   CREATE TABLE pg_resqueue\n   with 
(camelcase=ResQueue, shared=true, relid=6026, reltype_oid=9830, toast_oid=9820, 
toast_index=9821, toast_reltype=9822)\n   (\n   name name, -- name of resource 
queue\n   parentoid oid, -- oid of resource queue\n   activestats integer, -- 
active statement count limit\n   memorylimit text, -- memory limit in cluster\n 
  corelimit text, -- core limit in cluster\n   resovercommit real, -- resource 
upper limit in cluster\n   allocpolicy text, -- query resource allocation 
policy\n   vsegresourcequota text, -- vsegment resource quota\n   
nvsegupperlimit integer, -- vsegment size upper limit\n   nvseglowerlimit 
integer, -- vsegment size lower limit\n   nvsegupperlimitperseg integer, -- 
vsegment size upper limit per segment\n   nvseglowerlimitperseg integer, -- 
vsegment size lower limit per segment\n   creationtime timestamp with time 
zone, -- when the queue is created\n   updatetime timestamp with time zone, -- 
when the queue is updated ( create or alter )\n   status text, -- the status of resource queue\n  
 )",
+  "tabdef_text" : "\n   CREATE TABLE pg_resqueue\n   with 
(camelcase=ResQueue, shared=true, relid=6026, reltype_oid=9830, toast_oid=9820, 
toast_index=9821, toast_reltype=9822)\n   (\n   rsqname name, -- name of 
resource queue\n   parentoid oid, -- oid of resource queue\n   activestats 
integer, -- active statement count limit\n   memorylimit text, -- memory limit 
in cluster\n   corelimit text, -- core limit in cluster\n   resovercommit real, 
-- resource upper limit in cluster\n   allocpolicy text, -- query resource 
allocation policy\n   vsegresourcequota text, -- vsegment resource quota\n   
nvsegupperlimit integer, -- vsegment size upper limit\n   nvseglowerlimit 
integer, -- vsegment size lower limit\n   nvsegupperlimitperseg real, -- 
vsegment size upper limit per segment\n   nvseglowerlimitperseg real, -- 
vsegment size lower limit per segment\n   creationtime timestamp with time 
zone, -- when the queue is created\n   updatetime timestamp with time zone, -- 
when the queue is updated ( create or alter )\n   status text, -- the status of resource queue\n   
)",
   "t

svn commit: r19096 - /dev/incubator/hawq/2.2.0.0-incubating.RC2/

2017-04-10 Thread huor
Author: huor
Date: Mon Apr 10 09:57:29 2017
New Revision: 19096

Log:
Add 2.2.0.0-incubating RC2 candidate release artifacts

Added:
dev/incubator/hawq/2.2.0.0-incubating.RC2/

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.sha256

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 Mon Apr 10 09:57:29 2017
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v2
+
+iQIcBAABCgAGBQJY61G8AAoJEI/s2ogbi2hy9WgQAIHMl9MKBgaGpbSBdFJLjlxR
+k/OWL9q86cC/USc1bxIzcENEsbY7uGdjE3fIraQ+SIm27FTd7cNf9UlOJ6wJTffq
+dRVZSCl3cGauYAkglmLRlWyfqqNL0VTWuNAADu1DlPTVQ46/ELgowit56oaMpXWb
+7pkd0LdUqM3tz14dzeGVQlhaTrN74IsHtF8A5Xdvr2KeQlEQcLcUX+aakMZGtiFI
+W7aVtbuysTl0hPkUcpihenmAqIJ497LxrJ54MayMQpxIsTc6A6QqMPi71A4qCd5L
+uM4GvNrgQVsA1BjMOqvhymNBx9cUxo/E58hdxQjPndXo6JxQALjCkaAahn9/8GXN
+TOWm8OZllhy9sbK+1saKS1DqUZdFU7RX0smZCB7IqAKssbkBWeVUBIJ/8Mn3z/kq
+rwoYn1tYQ0bS/aEUetvwaJHYKLpJ2nv0FBRdvnFdjb0/X7lWOp9FD2IeAi4+p5+j
+5/5sfAkIEwKraFju18e2G2UyhBAD5R4jTavAO0u+3tWrhUBQY8yQpqZSyAEW54yS
+1jj14WxpoT7zCvfzpAKEHR32BJuaH5X+RUjA49aQKon2+Gm90Y9z911CkeuIl7Vi
+mfE9DQYMNwFu9jdKAl+XKiDB9+6EBDm4UY1p+aRTQN6nW/U/9rKywafQ6s1ep6tC
+9Ek6piRNQ9OzwOOg4gyS
+=aV3U
+-END PGP SIGNATURE-

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 Mon Apr 10 09:57:29 2017
@@ -0,0 +1 @@
+MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 
ac3a8196e90e938604cca468e0978cab

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 Mon Apr 10 09:57:29 2017
@@ -0,0 +1 @@
+7c4e047b89133bbe1f72994bcb75794ea382eb36b33cc404a88b1f8b70cc114a  
apache-hawq-rpm-2.2.0.0-incubating.tar.gz

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC2/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 Mon Apr 10 09:57:29 2017
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v2
+
+iQIcBAABCgAGBQJY6u2EAAoJEI/s2ogbi2hyImwQAKpBr/bpoaYxxKRbCQP+Gkr5
+wQ1pWA9ZG0+af0JyfdtG2ae3lcKHM6HtYmAnDxkqF3bt7lCxHN8+GFR+Bn5yGmOc
+l2U/Mt0FWL7RHV2Zo/7BSrwJRbRs7w7oQZybytVP1Z4/Q2Vf9R5LiffjhziylK1p
+y21Mx1MMYsxMnrjhRUu2X2urH9r4x/INK8WLDa+II8+plE6bit3IhulGfAQByumT
+Y+rQ9x+KYREZ7QIBBBarQGZTiUnPq80INnH4/9Xz4Va4sXRVSS9KBkkgQ1vObMyP
+NTw7rRfp6nvMLw4mzRuWNQtC+nM5zqgBAK4KnBLNFVl71ZKYT389YjmRo/ZO9lkP
+ny5hVUetd3HeKXWkSQo

[incubator-hawq] Git Push Summary

2017-04-09 Thread huor
Repository: incubator-hawq
Updated Tags:  refs/tags/2.2.0.0-incubating-rc2 [created] 9c8c3eece


[incubator-hawq] Git Push Summary

2017-04-01 Thread huor
Repository: incubator-hawq
Updated Tags:  refs/tags/2.2.0.0-incubating-rc1 [created] bc4713beb


svn commit: r18997 - /dev/incubator/hawq/2.2.0.0-incubating.RC1/

2017-04-01 Thread huor
Author: huor
Date: Sat Apr  1 13:30:33 2017
New Revision: 18997

Log:
Add 2.2.0.0-incubating RC1 candidate release artifacts

Added:
dev/incubator/hawq/2.2.0.0-incubating.RC1/

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz
   (with props)

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.md5

dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.sha256

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
--
svn:executable = *

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.asc
 Sat Apr  1 13:30:33 2017
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v2
+
+iQIcBAABCgAGBQJY36XyAAoJEI/s2ogbi2hytfsQAKG8s3Xp1leXzNbszA7j92fD
+1ov9qJ14VDbOoGuFsV01o8EgqpMP6VHbXfn0BYggcm9xA9iVeWsaKBA8JeJDBt5k
+FTccZEDoe/kbS4hXuY1SdpZnlRdEYnXev5gFAoi2Q6eBdBYCln+cCz1RttEt80sk
+duUOQiNyca1zt8JrzzzHlj6uzkpEIAMHZp+dewSgPfC4iSoH2ABHPMbOC3RuMR17
+eqna1g4Gfb/PQpKr7srit5T1yE8z338b5h6HwwvVV1NUyte3Cd8ABKc/tvk4p2MY
+AKp8UE5RqvUr2AWfe0iRFqeeshj+CVESOZWw8eNNkYtWi7ZJwXSuf2hVQ4YYuU40
+ZNvUz7uWr7ntQA6NTZn4oB6v3Azu9l1jJLu13Dm6Dcd3ZetDxqwZWj2MYWcid/Pa
+lel5Oba9y+j1V/lBulsp4Une442Wf4iHW422LQ1LTQ2sXU1pUANOuWelWECnrXQM
+mhn6RWas7iQ3mlwXLGUKSXCvRQYHHi2y6/EokY0oXNYJTr7vqkcXWdX+h9EuD6lA
+3wv/8pzkcvVTYEl/fX/yv6LDQi+QBLaQZ307/AY9xbEiXQDhbDj8IEtsiP1X/D13
+XDxpaZ3o774nIzzNTT4BtnGPB6MmPnbNnVhqV9dVo4Vq4D9fxQRG4DY6AxhE3oZa
+/7Gj+0cX1IKQBwXLkjRM
+=dFMe
+-END PGP SIGNATURE-

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.md5
 Sat Apr  1 13:30:33 2017
@@ -0,0 +1 @@
+MD5 (apache-hawq-rpm-2.2.0.0-incubating.tar.gz) = 2435532b078f601a2b51946ce8e09531

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-rpm-2.2.0.0-incubating.tar.gz.sha256
 Sat Apr  1 13:30:33 2017
@@ -0,0 +1 @@
+3933ce931b52913194b67588f15a92ffa8a931e763b9716a7fbba397493503ce  apache-hawq-rpm-2.2.0.0-incubating.tar.gz

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz
==
Binary file - no diff available.

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz
--
svn:executable = *

Propchange: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz
--
svn:mime-type = application/octet-stream

Added: 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
==
--- 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 (added)
+++ 
dev/incubator/hawq/2.2.0.0-incubating.RC1/apache-hawq-src-2.2.0.0-incubating.tar.gz.asc
 Sat Apr  1 13:30:33 2017
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v2
+
+iQIcBAABCgAGBQJY36WLAAoJEI/s2ogbi2hykpQP
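
Each artifact staged above for the 2.2.0.0-incubating RC1 vote is published with .asc, .md5, and .sha256 companions. Below is a minimal sketch of checking a downloaded tarball against the two digest files using only the standard hashlib module; the file names are taken from the listing above, and the script assumes the tarball and its digest files sit in the working directory. The .asc detached signature is checked separately with gpg against the project KEYS file (see the sketch after the KEYS commits further below).

import hashlib

def file_digest(path, algo):
    """Stream the file through the named hash and return its hex digest."""
    h = hashlib.new(algo)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

tarball = "apache-hawq-rpm-2.2.0.0-incubating.tar.gz"

# The .sha256 file above holds "<digest>  <filename>".
expected_sha256 = open(tarball + ".sha256").read().split()[0]
assert file_digest(tarball, "sha256") == expected_sha256, "sha256 mismatch"

# The .md5 file above is BSD style: "MD5 (<filename>) = <digest>".
expected_md5 = open(tarball + ".md5").read().strip().rsplit(" ", 1)[-1]
assert file_digest(tarball, "md5") == expected_md5, "md5 mismatch"

print("digest check passed for " + tarball)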

[4/4] incubator-hawq git commit: HAWQ-1419. Run RPS service tests in separate JVMs

2017-03-28 Thread huor
HAWQ-1419. Run RPS service tests in separate JVMs

(this closes #1202)


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/fe6c0e9c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/fe6c0e9c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/fe6c0e9c

Branch: refs/heads/2.2.0.0-incubating
Commit: fe6c0e9c82d939dacc161bc15188aae0f7a48226
Parents: 1e9ea99
Author: Alexander Denissov 
Authored: Tue Mar 28 14:48:07 2017 -0700
Committer: Alexander Denissov 
Committed: Tue Mar 28 14:56:54 2017 -0700

--
 ranger-plugin/service/pom.xml | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/fe6c0e9c/ranger-plugin/service/pom.xml
--
diff --git a/ranger-plugin/service/pom.xml b/ranger-plugin/service/pom.xml
index 500df1f..1633f68 100644
--- a/ranger-plugin/service/pom.xml
+++ b/ranger-plugin/service/pom.xml
@@ -61,6 +61,15 @@
 
 
 
+org.apache.maven.plugins
+maven-surefire-plugin
+2.19.1
+
+
+false
+
+
+
 maven-war-plugin
 3.0.0
 



[2/4] incubator-hawq git commit: HAWQ-1140. Rename yml file name and table name in TestUsage2Case1ErrorHashTableRegistry (close #1200)

2017-03-28 Thread huor
HAWQ-1140. Rename yml file name and table name in 
TestUsage2Case1ErrorHashTableRegistry
(close #1200)


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/46a9621b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/46a9621b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/46a9621b

Branch: refs/heads/2.2.0.0-incubating
Commit: 46a9621bb4b02e6932feb5bb0c07193e2812e66b
Parents: bbdcf95
Author: Chunling Wang 
Authored: Tue Mar 28 17:31:55 2017 +0800
Committer: Lili Ma 
Committed: Tue Mar 28 17:42:05 2017 +0800

--
 .../test_hawq_register_usage2_case1.cpp | 90 ++--
 1 file changed, 45 insertions(+), 45 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/46a9621b/src/test/feature/ManagementTool/test_hawq_register_usage2_case1.cpp
--
diff --git 
a/src/test/feature/ManagementTool/test_hawq_register_usage2_case1.cpp 
b/src/test/feature/ManagementTool/test_hawq_register_usage2_case1.cpp
index 3623528..997a549 100644
--- a/src/test/feature/ManagementTool/test_hawq_register_usage2_case1.cpp
+++ b/src/test/feature/ManagementTool/test_hawq_register_usage2_case1.cpp
@@ -214,51 +214,51 @@ TEST_F(TestHawqRegister, 
TestUsage2Case1HDFSFilePathContainErrorSymbol) {
 
 TEST_F(TestHawqRegister, TestUsage2Case1ErrorHashTableRegistry) {
 SQLUtility util;
-util.execute("drop table if exists t_1_1;");
-util.execute("drop table if exists t_1_2;");
-util.execute("drop table if exists t_1_3;");
-util.execute("drop table if exists t_2;");
-util.execute("drop table if exists nt_1;");
-util.execute("drop table if exists nt_2;");
-
-util.execute("create table t_1_1(i int, j int, k int) with 
(appendonly=true, orientation=row, bucketnum=12) distributed by (i, j);");
-util.execute("insert into t_1_1 select generate_series(1, 100);");
-util.query("select * from t_1_1;", 100);
-util.execute("create table t_1_2(i int, j int, k int) with 
(appendonly=true, orientation=row) distributed by (i);");
-util.execute("insert into t_1_2 select generate_series(1, 100);");
-util.query("select * from t_1_2;", 100);
-util.execute("create table t_1_3(i int, j int, k int) with 
(appendonly=true, orientation=row) distributed randomly;");
-util.execute("insert into t_1_3 select generate_series(1, 100);");
-util.query("select * from t_1_3;", 100);
-util.execute("create table t_2(i int, j int, k int) with (appendonly=true, 
orientation=row) distributed by (i);");
-util.execute("insert into t_2 select generate_series(1, 100);");
-util.query("select * from t_2;", 100);
-util.execute("create table nt_1(i int, j int, k int) with 
(appendonly=true, orientation=row) distributed by (i, j);");
-util.execute("insert into nt_1 select generate_series(1, 100);");
-util.query("select * from nt_1;", 100);
-util.execute("create table nt_2(i int, j int, k int) with 
(appendonly=true, orientation=row) distributed by (j);");
-util.execute("insert into nt_2 select generate_series(1, 100);");
-util.query("select * from nt_2;", 100);
-
-EXPECT_EQ(0, Command::getCommandStatus(hawq::test::stringFormat("hawq 
extract -d %s -o t_1_1.yml 
testhawqregister_testusage2case1errorhashtableregistry.t_1_1", HAWQ_DB)));
-EXPECT_EQ(1, Command::getCommandStatus(hawq::test::stringFormat("hawq 
register -d %s -c t_1_1.yml 
testhawqregister_testusage2case1errorhashtableregistry.nt_1", HAWQ_DB)));
-EXPECT_EQ(0, Command::getCommandStatus(hawq::test::stringFormat("hawq 
extract -d %s -o t_1_2.yml 
testhawqregister_testusage2case1errorhashtableregistry.t_1_2", HAWQ_DB)));
-EXPECT_EQ(1, Command::getCommandStatus(hawq::test::stringFormat("hawq 
register -d %s -c t_1_2.yml 
testhawqregister_testusage2case1errorhashtableregistry.nt_1", HAWQ_DB)));
-EXPECT_EQ(0, Command::getCommandStatus(hawq::test::stringFormat("hawq 
extract -d %s -o t_1_3.yml 
testhawqregister_testusage2case1errorhashtableregistry.t_1_3", HAWQ_DB)));
-EXPECT_EQ(1, Command::getCommandStatus(hawq::test::stringFormat("hawq 
register -d %s -c t_1_3.yml 
testhawqregister_testusage2case1errorhashtableregistry.nt_1", HAWQ_DB)));
-EXPECT_EQ(0, Command::getCommandStatus(hawq::test::stringFormat("hawq 
extract -d %s -o t_2.yml 
testhawqregister_testusage2case1errorhashtableregistry.t_2", HAWQ_DB)));
-EXPECT_EQ(1, Command::getCommandStatus(hawq::test::stringFormat("hawq 
register -d %s -c t_2.yml 
testhawqregister_testusage2case1errorhashtableregistry.nt_2", HAWQ_DB)));
-
-EXPECT_EQ(0, Command::getCommandStatus("rm -rf t_1_1.yml"));
-EXPECT_EQ(0, Command::getCommandStatus("rm -rf t_1_2.yml"));
-EXPECT_EQ(0, Command::getCommandStatus("rm -rf t_1_

[1/4] incubator-hawq git commit: HAWQ-1418. Print executing command for hawq register (close #1199)

2017-03-28 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/2.2.0.0-incubating c2ba42026 -> fe6c0e9c8


HAWQ-1418. Print executing command for hawq register
(close #1199)


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/bbdcf95a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/bbdcf95a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/bbdcf95a

Branch: refs/heads/2.2.0.0-incubating
Commit: bbdcf95a02a93c12ef1e786437cdad77c515ffa7
Parents: c2ba420
Author: Chunling Wang 
Authored: Tue Mar 28 17:08:35 2017 +0800
Committer: Lili Ma 
Committed: Tue Mar 28 17:20:29 2017 +0800

--
 tools/bin/hawqregister | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/bbdcf95a/tools/bin/hawqregister
--
diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister
index 39d0c23..e5c3f6a 100755
--- a/tools/bin/hawqregister
+++ b/tools/bin/hawqregister
@@ -34,6 +34,12 @@ except ImportError, e:
 sys.stderr.write('Cannot import module, please check that you have source 
greenplum_path.sh\n')
 sys.exit(2)
 
+# print executing command
+cmd = "Executing Command: ";
+for arg in sys.argv:
+cmd += arg + " "
+logger.info(cmd);
+
 # setup logging
 logger = get_default_logger()
 EXECNAME = os.path.split(__file__)[-1]



[3/4] incubator-hawq git commit: HAWQ-1418. Move print executing command after setup logging

2017-03-28 Thread huor
HAWQ-1418. Move print executing command after setup logging


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/1e9ea994
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/1e9ea994
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/1e9ea994

Branch: refs/heads/2.2.0.0-incubating
Commit: 1e9ea99458a251c3084e69147bdde2c19e702049
Parents: 46a9621
Author: Chunling Wang 
Authored: Tue Mar 28 18:15:00 2017 +0800
Committer: Chunling Wang 
Committed: Tue Mar 28 18:17:07 2017 +0800

--
 tools/bin/hawqregister | 10 +-
 1 file changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1e9ea994/tools/bin/hawqregister
--
diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister
index e5c3f6a..ff2bb96 100755
--- a/tools/bin/hawqregister
+++ b/tools/bin/hawqregister
@@ -34,17 +34,17 @@ except ImportError, e:
 sys.stderr.write('Cannot import module, please check that you have source 
greenplum_path.sh\n')
 sys.exit(2)
 
+# setup logging
+logger = get_default_logger()
+EXECNAME = os.path.split(__file__)[-1]
+setup_tool_logging(EXECNAME, getLocalHostname(), getUserName())
+
 # print executing command
 cmd = "Executing Command: ";
 for arg in sys.argv:
 cmd += arg + " "
 logger.info(cmd);
 
-# setup logging
-logger = get_default_logger()
-EXECNAME = os.path.split(__file__)[-1]
-setup_tool_logging(EXECNAME, getLocalHostname(), getUserName())
-
 def option_parser():
 '''option parser'''
 parser = OptParser(option_class=OptChecker,



[incubator-hawq] Git Push Summary

2017-03-28 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/2.2.0.0-incubating [created] c2ba42026


incubator-hawq git commit: HAWQ-1406. Update hawq version in pom.xml for hawq 2.2.0.0 incubating release

2017-03-27 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c8fbc1e90 -> 4921e92c4


HAWQ-1406. Update hawq version in pom.xml for hawq 2.2.0.0 incubating release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/4921e92c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/4921e92c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/4921e92c

Branch: refs/heads/master
Commit: 4921e92c46733d024629ecf02d7d01709fcd6c2f
Parents: c8fbc1e
Author: Ruilong Huo 
Authored: Mon Mar 27 17:09:31 2017 +0800
Committer: Ruilong Huo 
Committed: Mon Mar 27 17:09:31 2017 +0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4921e92c/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 1ccaeee..9803532 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@
  
   org.apache.hawq
   hawq
-  2.1
+  2.2
   pom
 
 



[1/2] incubator-hawq git commit: HAWQ-1406. Update HAWQ product version strings for 2.2.0.0-incubating release

2017-03-27 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 6041f4a51 -> c8fbc1e90


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c8fbc1e9/tools/bin/gppylib/data/2.2.json
--
diff --git a/tools/bin/gppylib/data/2.2.json b/tools/bin/gppylib/data/2.2.json
new file mode 100644
index 000..e62b59d
--- /dev/null
+++ b/tools/bin/gppylib/data/2.2.json
@@ -0,0 +1,10458 @@
+{
+   "__comment" : "Generated by tidycat.pl version 34 on Mon Mar 27 14:07:18 
2017 CATALOG_VERSION_NO=201507221",
+   "__info" : {
+  "CATALOG_VERSION_NO" : "201507221"
+   },
+   "gp_configuration" : {
+  "CamelCaseRelationId" : "GpConfigurationRelationId",
+  "colh" : {
+ "content" : "int2",
+ "datadir" : "text",
+ "dbid" : "int2",
+ "definedprimary" : "bool",
+ "hostname" : "NameData",
+ "isprimary" : "bool",
+ "port" : "int4",
+ "valid" : "bool"
+  },
+  "cols" : [
+ {
+"colname" : "content",
+"ctype" : "int2",
+"precomment" : "\n**TK_BLANK_LINE**",
+"sqltype" : "smallint"
+ },
+ {
+"colname" : "definedprimary",
+"ctype" : "bool",
+"sqltype" : "boolean"
+ },
+ {
+"colname" : "dbid",
+"ctype" : "int2",
+"sqltype" : "smallint"
+ },
+ {
+"colname" : "isprimary",
+"ctype" : "bool",
+"sqltype" : "boolean"
+ },
+ {
+"colname" : "valid",
+"ctype" : "bool",
+"sqltype" : "boolean"
+ },
+ {
+"colname" : "hostname",
+"ctype" : "NameData",
+"sqltype" : "name"
+ },
+ {
+"colname" : "port",
+"ctype" : "int4",
+"sqltype" : "integer"
+ },
+ {
+"colname" : "datadir",
+"ctype" : "text",
+"sqltype" : "text"
+ }
+  ],
+  "filename" : "gp_configuration.h",
+  "indexes" : [
+ {
+"CamelCaseIndexId" : "GpConfigurationContentDefinedprimaryIndexId",
+"cols" : [
+   [
+  "content",
+  "int2_ops"
+   ],
+   [
+  "definedprimary",
+  "bool_ops"
+   ]
+],
+"indexid" : "6101",
+"unique" : "1",
+"with" : {
+   "indexid" : "6101"
+}
+ },
+ {
+"CamelCaseIndexId" : "GpConfigurationDbidIndexId",
+"cols" : [
+   [
+  "dbid",
+  "int2_ops"
+   ]
+],
+"indexid" : "6102",
+"unique" : "1",
+"with" : {
+   "indexid" : "6102"
+}
+ }
+  ],
+  "relid_comment_tag" : "/* relation id: 5000 - gp_configuration */\n",
+  "tabdef_text" : "\n   CREATE TABLE gp_configuration\n   with 
(shared=true, oid=false, relid=5000, content=MASTER_ONLY)\n   (\n   content 
smallint,\n   definedprimary  boolean,\n   dbidsmallint,\n   
isprimary   boolean,\n   valid   boolean,\n   hostname
name,\n   portinteger,\n   datadir text\n   )",
+  "with" : {
+ "bootstrap" : 0,
+ "camelcase" : "GpConfiguration",
+ "content" : "MASTER_ONLY",
+ "oid" : "",
+ "relid" : "5000",
+ "shared" : "1",
+ "text" : "with (shared=true, oid=false, relid=5000, 
content=MASTER_ONLY)"
+  }
+   },
+   "gp_configuration_history" : {
+  "CamelCaseRelationId" : "GpConfigHistoryRelationId",
+  "UppercaseReltypeOid" : "GP_CONFIGURATION_HISTORY_RELTYPE_OID",
+  "colh" : {
+ "description" : "text",
+ "hostname" : "text",
+ "registration_order" : "int4",
+ "time" : "timestamptz"
+  },
+  "cols" : [
+ {
+"colname" : "time",
+"ctype" : "timestamptz",
+"precomment" : "\n**TK_BLANK_LINE**",
+"sqltype" : "timestamp_with_time_zone"
+ },
+ {
+"colname" : "registration_order",
+"ctype" : "int4",
+"sqltype" : "integer"
+ },
+ {
+"colname" : "hostname",
+"ctype" : "text",
+"sqltype" : "text"
+ },
+ {
+"colname" : "description",
+"ctype" : "text",
+"sqltype" : "text"
+ }
+  ],
+  "filename" : "gp_configuration.h",
+  "relid_comment_tag" : "/* relation id: 5006 - gp_configuration_history 
*/\n",
+  "tabdef_text" : "\n   CREATE TABLE gp_configuration_history\n   with 
(camelcase=GpConfigHistory, shared=true, oid=false, relid=5006, 
reltype_oid=6434, cont

[2/2] incubator-hawq git commit: HAWQ-1406. Update HAWQ product version strings for 2.2.0.0-incubating release

2017-03-27 Thread huor
HAWQ-1406. Update HAWQ product version strings for 2.2.0.0-incubating release


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/c8fbc1e9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/c8fbc1e9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/c8fbc1e9

Branch: refs/heads/master
Commit: c8fbc1e902f814d1b71390da467ec7fa505fb556
Parents: 6041f4a
Author: Ruilong Huo 
Authored: Fri Mar 24 22:50:09 2017 +0800
Committer: Ruilong Huo 
Committed: Mon Mar 27 14:17:21 2017 +0800

--
 contrib/hawq-ambari-plugin/build.properties | 4 +-
 contrib/hawq-ambari-plugin/pom.xml  | 2 +-
 getversion  | 2 +-
 tools/bin/gppylib/data/2.2.json | 10458 +
 4 files changed, 10462 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c8fbc1e9/contrib/hawq-ambari-plugin/build.properties
--
diff --git a/contrib/hawq-ambari-plugin/build.properties 
b/contrib/hawq-ambari-plugin/build.properties
index 696a550..aeb4259 100644
--- a/contrib/hawq-ambari-plugin/build.properties
+++ b/contrib/hawq-ambari-plugin/build.properties
@@ -1,8 +1,8 @@
-hawq.release.version=2.1.0
+hawq.release.version=2.2.0
 hawq.common.services.version=2.0.0
 pxf.release.version=3.2.0
 pxf.common.services.version=3.0.0
 hawq.repo.prefix=hawq
 hawq.addons.repo.prefix=hawq-add-ons
-repository.version=2.1.0.0
+repository.version=2.2.0.0
 default.stack=HDP-2.5

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c8fbc1e9/contrib/hawq-ambari-plugin/pom.xml
--
diff --git a/contrib/hawq-ambari-plugin/pom.xml 
b/contrib/hawq-ambari-plugin/pom.xml
index 2367a57..ad55e84 100644
--- a/contrib/hawq-ambari-plugin/pom.xml
+++ b/contrib/hawq-ambari-plugin/pom.xml
@@ -21,7 +21,7 @@
   4.0.0
   org.apache.hawq
   hawq-ambari-plugin
-  2.1.0.0
+  2.2.0.0
   hawq-ambari-plugin
   http://maven.apache.org
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c8fbc1e9/getversion
--
diff --git a/getversion b/getversion
index ac43748..8527657 100755
--- a/getversion
+++ b/getversion
@@ -18,7 +18,7 @@
 # under the License.
 #
 
-GP_VERSION="2.1.0.0-incubating"
+GP_VERSION="2.2.0.0-incubating"
 
 GP_BUILDNUMBER=dev
 if [ -f BUILD_NUMBER ] ; then



svn commit: r18889 - /release/incubator/hawq/KEYS

2017-03-23 Thread huor
Author: huor
Date: Fri Mar 24 06:46:55 2017
New Revision: 18889

Log:
Add Ruilong Huo (CODE SIGNING KEY) info

Modified:
release/incubator/hawq/KEYS

Modified: release/incubator/hawq/KEYS
==
--- release/incubator/hawq/KEYS (original)
+++ release/incubator/hawq/KEYS Fri Mar 24 06:46:55 2017
@@ -471,3 +471,62 @@ XkCUbhNwU+slXd46Xlt1PbYXipsMrlSn6/AoKn2v
 dUR+LUsJi54hCfW7WQv249kpR+BflURy+8I=
 =ilaS
 -END PGP PUBLIC KEY BLOCK-
+
+pub   4096R/1B8B6872 2017-01-10
+uid   [ultimate] Ruilong Huo (CODE SIGNING KEY) 
+sig 31B8B6872 2017-01-10  Ruilong Huo (CODE SIGNING KEY) 

+sub   4096R/EBE332AF 2017-01-10
+sig  1B8B6872 2017-01-10  Ruilong Huo (CODE SIGNING KEY) 

+
+-BEGIN PGP PUBLIC KEY BLOCK-
+Version: GnuPG v2
+
+mQINBFh0RswBEADRA5eV1MMT/+dM06pNrqDXNdLNl3PR7BJwNSfjM3PfkIAaVIB0
+r3VaRxRdeSCXxenrID412l3+mDIry80WM8QpP0U/xwCMHO2J3PIScOQfEIctI5Pf
+XVMP0Y0K1au2Lu8CUU/7i5xP7nrl+3A2zJ7R2G3UZGyIsLzUQy+1TewTa7pdaywa
+KKAVLAbItCaghXVInZ1SqOSZ3NufOM/ixFIioEEiCHpB/UsEJ+adEgZw+Wxnxl2C
++gg7cERsH0j4tmdI87roEWNjhSaqm1WPB31cBckOYkjQITXnFj76fuGs5v+u3IZe
+paJ/G5CcYyer5/zGT8omht3gzMaXDedbi6A6X7WozkLx9SLq9uQnPDuVYvzSa37u
+1Iyqe1Va5oDPvu7i71V6QKZHvSWI4BdKucOPNTkdnKWKYJXA7av38SQMklOPvGz1
+4QkxysjuJUFuehSgw9HKQ4inwn5C5EwI4+SXTEKwBHeMNSPD4y9bqzh78Ct2k07G
+eaqjTeSiSiJdbySsH3C47ITqpeTSHSEm4+ZgkWuFEBO4VRAEG0QIadmNFBNU3YmN
+Fz1xKZv/vd8refUeobyWHxSVEizABZHkQ6pab0apbYhkGLzqiuWmC2ZsUrphCp4y
+3Nqce/l+DsLUZTJNGZqTuauQiu9MYTZeS0OQPw9qox7WVEDjN27Uq3UepwARAQAB
+tDBSdWlsb25nIEh1byAoQ09ERSBTSUdOSU5HIEtFWSkgPGh1b3JAYXBhY2hlLm9y
+Zz6JAjcEEwEKACEFAlh0RswCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
+j+zaiBuLaHJNoRAAh1d04GhpjkA0PKiccga1EbqSc2bo6pKyO6pJ8f+Vbqla1NXt
+rgAXaNm5JPhFnr//dkbnMKTbpGpIyUv24vHtRKVSVckhFB7r1pq6/faEqZ7nkh14
+rNBM9G/Z5pX3iAy/ooPYi0S2RtYwvme09DefOsCRSnwJ2AZQoD7OQJWDsU82Ua0a
+hl6F0I34JZcXF6zR9aX0AM4v7cm1yskxeHogCjQwzkZOan/S3JLNwMCsyZSz85j8
+4RT3zJ0/wQWQUy5ICv/m/+zyqNXtjkhXTF833zFlIYxmnCMLIuPQxJmrkJGm3opZ
+v7oFA3ZoZf4CjhUUfdT6OKn55/+eSOUWZz4xPwLVz0dcw2uUOkE7ZmuULSoz1jem
+3FiHOa+560Zazc7uEO8Pq6PbIRKd0NLdFJ7gAQjj9LQ511xIENMBh8TgqQZn7tm4
+8KYMR0N3ewdK3WBlxgh16JxkPZOWAKM7ROAjocMD6HoVdZjkQinwY8N/mLdKQoDv
+kKxJsqXyn8Y1o4V6KdvsO/kIcBjgXJXq24HKY4UUIhi4LyvCJ6pMuABFdzPRSCCv
+FTxnRz4XFI1ZFyeXUzUsV8dIjyYSusmFv7j5/OgKSZkAzB6HNTKWbY8Sxq4W1xx2
+WV/6zbFDc1NOhiw87MLiWFgsToe6xnFKxMKeC6BYGWVpGmV6rcR3lYVqKoK5Ag0E
+WHRGzAEQANlD6b1CkfNJmAcT89UxNX4Y7kvTmRz93o3SZoNYexSLKxdNgali5K1c
+yXWKoRmWoFsgU67KXvPBq5Qq1M6WYY3mDHzYdb68y6oQScifZ3FZo4+QXwyqI7bV
+3FJ/FnNCim/AhbCTGFQoTOPX+5YMYU1diyfOQp/X/vq+XjlaQ9VBG00hbFK7a/WL
+7SjLpfLneaDDXG5y0WS94p+8K91Haatnw6J1ldmiUCmBItG2vJZ4y5x80DWHvwlw
+ISuB89PoCjzpeufH5WtNTL3CDt7V9AkXUdr7v1jRpRiNnlCp8vN8lcsr4GzQe/mL
+1ClsjedDcCC7kw6vY+x5b50Vkk9o10JNEyWgDKi5kr2BaGtCZwpvXZGbTpwpeBKm
+FOQFoEwjWpu9bY4WGdyfEUn1GmyYG17Rvticj14oJlXbSHmT/DER3S5VbXMHuUbn
+soJJJXgs9rsTpl3iXE5Ly6ZFPRyYmHaRMKa6uWs26PS3KQLGyvoGFUxYk94ZuZWD
+B5+kJNlV1HgVRi1gzfuDbKBiKi14NIDCqL5mEeQ0PWarTbmQg8KYtqj7RYnLnoLg
+DSDHotH4IjmQ1ngQjEBPbtsIX4YdDBVbRNAWJ49sVjxs9veeqtxjPVmwefXCieDX
+2U2OXeGn1nK2ivGWbf03kn8yldDs5qHCCLVvmzmDnZa1Pz/ekWBZABEBAAGJAh8E
+GAEKAAkFAlh0RswCGwwACgkQj+zaiBuLaHKD1RAAjYu+vDdkqlduqf/ui+aNGWWY
+cgfSqbL0jWRvDAiBUWMjBzPXrnJQ/hPk07rGqLUs/QZ62ZuN+NDIn5ndtngBMwGY
+OrM7cbo/cvsRaitFhRoMMh9Egm05izNlFlso6+bOi8pQWTqFJ9JD+8rLqv8IoRo3
+rxpWW9+y8qqKRjLXuuALka51gP6tFambhT17vOV3HS/c6QkvAHC9aKwP25XiK536
+3dTZ8c99EZ9EeTG4EjUnsmZdWLqlk/mQ6JRdoQvbkWrb0IlaFVUpaH5zG++HhW8c
+Xu4/gdHQxJbPNO6E+ZJT86iYR/oUtongjj5kd3ECaQXJjSV7kQeUUq1uhqkJtyof
+YZscpNv7DnbskuDoJ7/VZOH/Ym1Y5ECFVLHhOOj9JfDvMst4Lu9lfyZOsBJkgWZN
+Pn+SNsIEGZjT5Bvja4br5ON62vJNDoCHlDuztAOdth/uoWHBQI+GAmyR0T6mRrq4
+gCw41TMGjwgc+xwvLRLXZjqw16/3qDuEJWkLrLIjuaEsFldWeVd0zc7fE/XX92N0
+lY5WXua0SGFw8oWDdpGHo62tSoXpfcS9C4/MGORTAEFigYeZK/qdNsOW13oAGm4R
+p+VBc/GNsUAdvg9+WO5Vz/6gJLzSNVXWt6R4z+ksoktAPHILWWVP+7OqXzLgO1h+
+mC6PDOREjWhops5Iyl0=
+=2o5o
+-END PGP PUBLIC KEY BLOCK-




svn commit: r18888 - /dev/incubator/hawq/KEYS

2017-03-23 Thread huor
Author: huor
Date: Fri Mar 24 06:39:18 2017
New Revision: 1

Log:
Add Ruilong Huo (CODE SIGNING KEY) info

Modified:
dev/incubator/hawq/KEYS

Modified: dev/incubator/hawq/KEYS
==
--- dev/incubator/hawq/KEYS (original)
+++ dev/incubator/hawq/KEYS Fri Mar 24 06:39:18 2017
@@ -471,3 +471,62 @@ XkCUbhNwU+slXd46Xlt1PbYXipsMrlSn6/AoKn2v
 dUR+LUsJi54hCfW7WQv249kpR+BflURy+8I=
 =ilaS
 -END PGP PUBLIC KEY BLOCK-
+
+pub   4096R/1B8B6872 2017-01-10
+uid   [ultimate] Ruilong Huo (CODE SIGNING KEY) 
+sig 31B8B6872 2017-01-10  Ruilong Huo (CODE SIGNING KEY) 

+sub   4096R/EBE332AF 2017-01-10
+sig  1B8B6872 2017-01-10  Ruilong Huo (CODE SIGNING KEY) 

+
+-BEGIN PGP PUBLIC KEY BLOCK-
+Version: GnuPG v2
+
+mQINBFh0RswBEADRA5eV1MMT/+dM06pNrqDXNdLNl3PR7BJwNSfjM3PfkIAaVIB0
+r3VaRxRdeSCXxenrID412l3+mDIry80WM8QpP0U/xwCMHO2J3PIScOQfEIctI5Pf
+XVMP0Y0K1au2Lu8CUU/7i5xP7nrl+3A2zJ7R2G3UZGyIsLzUQy+1TewTa7pdaywa
+KKAVLAbItCaghXVInZ1SqOSZ3NufOM/ixFIioEEiCHpB/UsEJ+adEgZw+Wxnxl2C
++gg7cERsH0j4tmdI87roEWNjhSaqm1WPB31cBckOYkjQITXnFj76fuGs5v+u3IZe
+paJ/G5CcYyer5/zGT8omht3gzMaXDedbi6A6X7WozkLx9SLq9uQnPDuVYvzSa37u
+1Iyqe1Va5oDPvu7i71V6QKZHvSWI4BdKucOPNTkdnKWKYJXA7av38SQMklOPvGz1
+4QkxysjuJUFuehSgw9HKQ4inwn5C5EwI4+SXTEKwBHeMNSPD4y9bqzh78Ct2k07G
+eaqjTeSiSiJdbySsH3C47ITqpeTSHSEm4+ZgkWuFEBO4VRAEG0QIadmNFBNU3YmN
+Fz1xKZv/vd8refUeobyWHxSVEizABZHkQ6pab0apbYhkGLzqiuWmC2ZsUrphCp4y
+3Nqce/l+DsLUZTJNGZqTuauQiu9MYTZeS0OQPw9qox7WVEDjN27Uq3UepwARAQAB
+tDBSdWlsb25nIEh1byAoQ09ERSBTSUdOSU5HIEtFWSkgPGh1b3JAYXBhY2hlLm9y
+Zz6JAjcEEwEKACEFAlh0RswCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
+j+zaiBuLaHJNoRAAh1d04GhpjkA0PKiccga1EbqSc2bo6pKyO6pJ8f+Vbqla1NXt
+rgAXaNm5JPhFnr//dkbnMKTbpGpIyUv24vHtRKVSVckhFB7r1pq6/faEqZ7nkh14
+rNBM9G/Z5pX3iAy/ooPYi0S2RtYwvme09DefOsCRSnwJ2AZQoD7OQJWDsU82Ua0a
+hl6F0I34JZcXF6zR9aX0AM4v7cm1yskxeHogCjQwzkZOan/S3JLNwMCsyZSz85j8
+4RT3zJ0/wQWQUy5ICv/m/+zyqNXtjkhXTF833zFlIYxmnCMLIuPQxJmrkJGm3opZ
+v7oFA3ZoZf4CjhUUfdT6OKn55/+eSOUWZz4xPwLVz0dcw2uUOkE7ZmuULSoz1jem
+3FiHOa+560Zazc7uEO8Pq6PbIRKd0NLdFJ7gAQjj9LQ511xIENMBh8TgqQZn7tm4
+8KYMR0N3ewdK3WBlxgh16JxkPZOWAKM7ROAjocMD6HoVdZjkQinwY8N/mLdKQoDv
+kKxJsqXyn8Y1o4V6KdvsO/kIcBjgXJXq24HKY4UUIhi4LyvCJ6pMuABFdzPRSCCv
+FTxnRz4XFI1ZFyeXUzUsV8dIjyYSusmFv7j5/OgKSZkAzB6HNTKWbY8Sxq4W1xx2
+WV/6zbFDc1NOhiw87MLiWFgsToe6xnFKxMKeC6BYGWVpGmV6rcR3lYVqKoK5Ag0E
+WHRGzAEQANlD6b1CkfNJmAcT89UxNX4Y7kvTmRz93o3SZoNYexSLKxdNgali5K1c
+yXWKoRmWoFsgU67KXvPBq5Qq1M6WYY3mDHzYdb68y6oQScifZ3FZo4+QXwyqI7bV
+3FJ/FnNCim/AhbCTGFQoTOPX+5YMYU1diyfOQp/X/vq+XjlaQ9VBG00hbFK7a/WL
+7SjLpfLneaDDXG5y0WS94p+8K91Haatnw6J1ldmiUCmBItG2vJZ4y5x80DWHvwlw
+ISuB89PoCjzpeufH5WtNTL3CDt7V9AkXUdr7v1jRpRiNnlCp8vN8lcsr4GzQe/mL
+1ClsjedDcCC7kw6vY+x5b50Vkk9o10JNEyWgDKi5kr2BaGtCZwpvXZGbTpwpeBKm
+FOQFoEwjWpu9bY4WGdyfEUn1GmyYG17Rvticj14oJlXbSHmT/DER3S5VbXMHuUbn
+soJJJXgs9rsTpl3iXE5Ly6ZFPRyYmHaRMKa6uWs26PS3KQLGyvoGFUxYk94ZuZWD
+B5+kJNlV1HgVRi1gzfuDbKBiKi14NIDCqL5mEeQ0PWarTbmQg8KYtqj7RYnLnoLg
+DSDHotH4IjmQ1ngQjEBPbtsIX4YdDBVbRNAWJ49sVjxs9veeqtxjPVmwefXCieDX
+2U2OXeGn1nK2ivGWbf03kn8yldDs5qHCCLVvmzmDnZa1Pz/ekWBZABEBAAGJAh8E
+GAEKAAkFAlh0RswCGwwACgkQj+zaiBuLaHKD1RAAjYu+vDdkqlduqf/ui+aNGWWY
+cgfSqbL0jWRvDAiBUWMjBzPXrnJQ/hPk07rGqLUs/QZ62ZuN+NDIn5ndtngBMwGY
+OrM7cbo/cvsRaitFhRoMMh9Egm05izNlFlso6+bOi8pQWTqFJ9JD+8rLqv8IoRo3
+rxpWW9+y8qqKRjLXuuALka51gP6tFambhT17vOV3HS/c6QkvAHC9aKwP25XiK536
+3dTZ8c99EZ9EeTG4EjUnsmZdWLqlk/mQ6JRdoQvbkWrb0IlaFVUpaH5zG++HhW8c
+Xu4/gdHQxJbPNO6E+ZJT86iYR/oUtongjj5kd3ECaQXJjSV7kQeUUq1uhqkJtyof
+YZscpNv7DnbskuDoJ7/VZOH/Ym1Y5ECFVLHhOOj9JfDvMst4Lu9lfyZOsBJkgWZN
+Pn+SNsIEGZjT5Bvja4br5ON62vJNDoCHlDuztAOdth/uoWHBQI+GAmyR0T6mRrq4
+gCw41TMGjwgc+xwvLRLXZjqw16/3qDuEJWkLrLIjuaEsFldWeVd0zc7fE/XX92N0
+lY5WXua0SGFw8oWDdpGHo62tSoXpfcS9C4/MGORTAEFigYeZK/qdNsOW13oAGm4R
+p+VBc/GNsUAdvg9+WO5Vz/6gJLzSNVXWt6R4z+ksoktAPHILWWVP+7OqXzLgO1h+
+mC6PDOREjWhops5Iyl0=
+=2o5o
+-END PGP PUBLIC KEY BLOCK-
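
The KEYS files updated in the two svn commits above are what release verifiers import before checking the .asc signatures attached to the staged artifacts. A small sketch of that flow via subprocess; it assumes gpg is installed and that the KEYS file and the artifacts have been downloaded into the working directory (file names follow the 2.2.0.0-incubating listing earlier in this digest).

import subprocess

# Import the published signing keys, then verify the detached signature.
subprocess.run(["gpg", "--import", "KEYS"], check=True)
subprocess.run(
    ["gpg", "--verify",
     "apache-hawq-src-2.2.0.0-incubating.tar.gz.asc",
     "apache-hawq-src-2.2.0.0-incubating.tar.gz"],
    check=True,
)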




[1/2] incubator-hawq git commit: HAWQ-1371. Fix QE process hang in shared input scan

2017-03-10 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 9d0ea4699 -> 914cbc1ab


HAWQ-1371. Fix QE process hang in shared input scan


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/914cbc1a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/914cbc1a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/914cbc1a

Branch: refs/heads/master
Commit: 914cbc1ab43abd421dfe0e139c23ede03dd40f95
Parents: 61780e9
Author: amyrazz44 
Authored: Tue Mar 7 16:22:18 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Mar 10 15:59:16 2017 +0800

--
 src/backend/executor/nodeShareInputScan.c | 39 --
 1 file changed, 30 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/914cbc1a/src/backend/executor/nodeShareInputScan.c
--
diff --git a/src/backend/executor/nodeShareInputScan.c 
b/src/backend/executor/nodeShareInputScan.c
index 0f08848..049943b 100644
--- a/src/backend/executor/nodeShareInputScan.c
+++ b/src/backend/executor/nodeShareInputScan.c
@@ -40,7 +40,6 @@
 
 #include "postgres.h"
 
-#include "access/xact.h"
 #include "cdb/cdbvars.h"
 #include "executor/executor.h"
 #include "executor/nodeShareInputScan.h"
@@ -641,10 +640,6 @@ read_retry:
goto read_retry;
else
{
-   if(fd >= 0)
-   {
-   gp_retry_close(fd);
-   }
elog(ERROR, "could not read from fifo: %m");
}
Assert(!"Never be here");
@@ -664,10 +659,6 @@ write_retry:
goto write_retry;
else
{
-   if(fd >= 0)
-   {
-   gp_retry_close(fd);
-   }
elog(ERROR, "could not write to fifo: %m");
}
 
@@ -751,6 +742,21 @@ shareinput_reader_waitready(int share_id, PlanGenerator 
planGen)
{
CHECK_FOR_INTERRUPTS();
 
+   /*
+* Readers won't wait for data writing done notification from 
writer if transaction is
+* aborting. Writer may fail to send data writing done 
notification to readers in two
+* cases:
+*
+*1. The transaction is aborted due to interrupts or 
exceptions, i.e., user cancels
+*   query, division by zero on some segment
+*
+*2. Logic errors in reader which incur its unexpected 
exit, i.e., segmentation fault
+*/
+   if (IsAbortInProgress())
+   {
+   break;
+   }
+
MPP_FD_ZERO(&rset);
MPP_FD_SET(pctxt->readyfd, &rset);
 
@@ -888,6 +894,21 @@ writer_wait_for_acks(ShareInput_Lk_Context *pctxt, int 
share_id, int xslice)
{
CHECK_FOR_INTERRUPTS();
 
+   /*
+* Writer won't wait for ack notification from readers if 
transaction is
+* aborting. Readers may fail to send ack notification to 
writer in two
+* cases:
+*
+*1. The transaction is aborted due to interrupts or 
exceptions, i.e., user cancels
+*   query, division by zero on some segment
+*
+*2. Logic errors in reader which incur its unexpected 
exit, i.e., segmentation fault
+*/
+   if (IsAbortInProgress())
+   {
+   break;
+   }
+
MPP_FD_ZERO(&rset);
MPP_FD_SET(pctxt->donefd, &rset);
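
The comments added by this patch describe the hang being fixed: a reader or writer blocked on the shared-input-scan fifo could wait forever for a notification from a peer that aborted or crashed, so each wait loop now re-checks IsAbortInProgress() on every iteration instead of waiting indefinitely. An illustrative analogue of that loop shape in plain Python (not the HAWQ C code): is_abort_in_progress is a hypothetical callable standing in for IsAbortInProgress(), and fd can be any readable file descriptor.

import select

def wait_for_peer(fd, is_abort_in_progress, poll_seconds=1.0):
    """Wait until fd is readable; give up early if the transaction is aborting."""
    while True:
        if is_abort_in_progress():   # analogue of the IsAbortInProgress() check
            return False
        readable, _, _ = select.select([fd], [], [], poll_seconds)
        if readable:
            return True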
 



[2/2] incubator-hawq git commit: Revert "HAWQ-1342. Fixed QE process hang in shared input scan on segment node"

2017-03-10 Thread huor
Revert "HAWQ-1342. Fixed QE process hang in shared input scan on segment node"

    The fix introduced a hang regression in shared input scan queries, as described in HAWQ-1371


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/61780e99
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/61780e99
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/61780e99

Branch: refs/heads/master
Commit: 61780e999c508973e3dccd460e80f47853928277
Parents: 9d0ea46
Author: amyrazz44 
Authored: Tue Mar 7 16:02:39 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Mar 10 15:59:16 2017 +0800

--
 src/backend/executor/nodeShareInputScan.c | 49 +++---
 1 file changed, 28 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/61780e99/src/backend/executor/nodeShareInputScan.c
--
diff --git a/src/backend/executor/nodeShareInputScan.c 
b/src/backend/executor/nodeShareInputScan.c
index 74dbcb5..0f08848 100644
--- a/src/backend/executor/nodeShareInputScan.c
+++ b/src/backend/executor/nodeShareInputScan.c
@@ -40,6 +40,7 @@
 
 #include "postgres.h"
 
+#include "access/xact.h"
 #include "cdb/cdbvars.h"
 #include "executor/executor.h"
 #include "executor/nodeShareInputScan.h"
@@ -640,6 +641,10 @@ read_retry:
goto read_retry;
else
{
+   if(fd >= 0)
+   {
+   gp_retry_close(fd);
+   }
elog(ERROR, "could not read from fifo: %m");
}
Assert(!"Never be here");
@@ -659,6 +664,10 @@ write_retry:
goto write_retry;
else
{
+   if(fd >= 0)
+   {
+   gp_retry_close(fd);
+   }
elog(ERROR, "could not write to fifo: %m");
}
 
@@ -785,14 +794,7 @@ shareinput_reader_waitready(int share_id, PlanGenerator 
planGen)
{
int save_errno = errno;
elog(LOG, "SISC READER (shareid=%d, slice=%d): Wait 
ready try again, errno %d ... ",
-   share_id, 
currentSliceId, save_errno);
-   if(save_errno == EBADF)
-   {
-   /* The file description is invalid, maybe this 
FD has been already closed by writer in some cases
-* we need to break here to avoid endless loop 
and continue to run CHECK_FOR_INTERRUPTS.
-*/
-   break;
-   }
+   share_id, currentSliceId, save_errno);
}
}
return (void *) pctxt;
@@ -923,12 +925,9 @@ writer_wait_for_acks(ShareInput_Lk_Context *pctxt, int 
share_id, int xslice)
int save_errno = errno;
elog(LOG, "SISC WRITER (shareid=%d, slice=%d): notify 
still wait for an answer, errno %d",
share_id, currentSliceId, save_errno);
-   if(save_errno == EBADF)
-   {
-   /* The file description is invalid, maybe this 
FD has been already closed by writer in some cases
-* we need to break here to avoid endless loop 
and continue to run CHECK_FOR_INTERRUPTS.
-*/
-   break;
+   /*if error(except EINTR) happens in select, we just 
return to avoid endless loop*/
+   if(errno != EINTR){
+   return;
}
}
}
@@ -980,6 +979,21 @@ shareinput_writer_waitdone(void *ctxt, int share_id, int 
nsharer_xslice)
while(ack_needed > 0)
{
CHECK_FOR_INTERRUPTS();
+
+   /*
+* Writer won't wait for data reading done notification from 
readers if transaction is
+* aborting. Readers may fail to send data reading done 
notification to writer in two
+* cases:
+*
+*1. The transaction is aborted due to interrupts or 
exceptions, i.e., user cancels
+*   query, division by zero on some segment
+*
+*2. Logic errors in reader which incur its unexpected 
exit, i.e., segmentation fault
+*/
+   if (IsAbortInProgress())
+   {
+   break;
+   }

MPP_FD_ZERO(&rset);
MPP_FD_SET(pctxt->donefd, &rset);
@@

incubator-hawq git commit: HAWQ-1323. Fix access mode of files in adding license header added to each source file in test directory

2017-02-14 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 55e5ab5a3 -> 9a86ed8c9


HAWQ-1323. Fix access mode of files in adding license header added to each 
source file in test directory


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/9a86ed8c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/9a86ed8c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/9a86ed8c

Branch: refs/heads/master
Commit: 9a86ed8c9356fc2133d36421116961b7a8ca2265
Parents: 55e5ab5
Author: amyrazz44 
Authored: Tue Feb 14 17:00:37 2017 +0800
Committer: Ruilong Huo 
Committed: Tue Feb 14 17:18:53 2017 +0800

--
 src/test/bench/create.sh | 0
 src/test/bench/runwisc.sh| 0
 src/test/bench/wholebench.sh | 0
 src/test/feature/PreparedStatement/TestPreparedStatement.cpp | 0
 src/test/feature/UDF/TestUDF.cpp | 0
 src/test/feature/UDF/lib/function.c  | 0
 src/test/feature/ao/TestAoSnappy.cpp | 0
 src/test/feature/parallel-run-feature-test.sh| 0
 src/test/feature/partition/test_partition.cpp| 0
 src/test/feature/test_main.cpp   | 0
 src/test/locale/sort-test.pl | 0
 src/test/locale/sort-test.py | 0
 src/test/mb/mbregress.sh | 0
 src/test/performance/runtests.pl | 0
 src/test/performance/start-pgsql.sh  | 0
 src/test/regress/atmsort.pl  | 0
 src/test/regress/checkinc.py | 0
 src/test/regress/dld.pl  | 0
 src/test/regress/explain.pl  | 0
 src/test/regress/get_ereport.pl  | 0
 src/test/regress/gpdiff.pl   | 0
 src/test/regress/gpexclude.pl| 0
 src/test/regress/gpsourcify.pl   | 0
 src/test/regress/gpstringsubs.pl | 0
 src/test/regress/gptorment.pl| 0
 src/test/regress/maketestschedule.py | 0
 src/test/regress/regressplans.sh | 0
 src/test/regress/upg2_wizard.pl  | 0
 src/test/unit/cmockery/cmockery.h| 0
 src/test/unit/mock/mocker.py | 0
 30 files changed, 0 insertions(+), 0 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/bench/create.sh
--
diff --git a/src/test/bench/create.sh b/src/test/bench/create.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/bench/runwisc.sh
--
diff --git a/src/test/bench/runwisc.sh b/src/test/bench/runwisc.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/bench/wholebench.sh
--
diff --git a/src/test/bench/wholebench.sh b/src/test/bench/wholebench.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/feature/PreparedStatement/TestPreparedStatement.cpp
--
diff --git a/src/test/feature/PreparedStatement/TestPreparedStatement.cpp 
b/src/test/feature/PreparedStatement/TestPreparedStatement.cpp
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/feature/UDF/TestUDF.cpp
--
diff --git a/src/test/feature/UDF/TestUDF.cpp b/src/test/feature/UDF/TestUDF.cpp
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/feature/UDF/lib/function.c
--
diff --git a/src/test/feature/UDF/lib/function.c 
b/src/test/feature/UDF/lib/function.c
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9a86ed8c/src/test/feature/ao/TestAoSnappy.cpp
--
diff --git a/src/test/feature/ao/TestAoSnappy.cpp 
b/src/test/feature/ao/TestAoSnappy.cpp
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/

[1/2] incubator-hawq git commit: HAWQ-1323. Add license header added to each source file in test directory

2017-02-14 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 9918443b6 -> 492b1a782


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/492b1a78/src/test/feature/query/test_insert.cpp
--
diff --git a/src/test/feature/query/test_insert.cpp 
b/src/test/feature/query/test_insert.cpp
index 1e05b9c..dc1e361 100644
--- a/src/test/feature/query/test_insert.cpp
+++ b/src/test/feature/query/test_insert.cpp
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include 
 #include 
 #include 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/492b1a78/src/test/feature/query/test_nested_case_null.cpp
--
diff --git a/src/test/feature/query/test_nested_case_null.cpp 
b/src/test/feature/query/test_nested_case_null.cpp
index 3d5894f..5fd547f 100644
--- a/src/test/feature/query/test_nested_case_null.cpp
+++ b/src/test/feature/query/test_nested_case_null.cpp
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include 
 #include 
 #include 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/492b1a78/src/test/feature/query/test_parser.cpp
--
diff --git a/src/test/feature/query/test_parser.cpp 
b/src/test/feature/query/test_parser.cpp
index c2aec26..597dbf1 100644
--- a/src/test/feature/query/test_parser.cpp
+++ b/src/test/feature/query/test_parser.cpp
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include 
 #include 
 #include 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/492b1a78/src/test/feature/query/test_polymorphism.cpp
--
diff --git a/src/test/feature/query/test_polymorphism.cpp 
b/src/test/feature/query/test_polymorphism.cpp
index 42d5a6a..21bb663 100644
--- a/src/test/feature/query/test_polymorphism.cpp
+++ b/src/test/feature/query/test_polymorphism.cpp
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS I

[2/2] incubator-hawq git commit: HAWQ-1323. Add license header added to each source file in test directory

2017-02-14 Thread huor
HAWQ-1323. Add license header added to each source file in test directory


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/492b1a78
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/492b1a78
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/492b1a78

Branch: refs/heads/master
Commit: 492b1a782351b8cf1701703773a6cacc6bd1a0cb
Parents: 9918443
Author: amyrazz44 
Authored: Tue Feb 14 10:08:03 2017 +0800
Committer: amyrazz44 
Committed: Tue Feb 14 16:18:46 2017 +0800

--
 pom.xml | 34 +++-
 src/test/amsd/testframe.h   | 18 +++
 src/test/bench/create.sh| 18 +++
 src/test/bench/runwisc.sh   | 18 +++
 src/test/bench/wholebench.sh| 18 +++
 src/test/examples/testlibpq.c   | 18 +++
 src/test/examples/testlibpq2.c  | 18 +++
 src/test/examples/testlibpq3.c  | 18 +++
 src/test/examples/testlibpq4.c  | 18 +++
 src/test/examples/testlo.c  | 18 +++
 src/test/feature/ExternalSource/lib/function.c  | 18 +++
 .../feature/ExternalSource/test_errortbl.cpp| 18 +++
 .../ExternalSource/test_external_oid.cpp| 18 +++
 src/test/feature/ExternalSource/test_exttab.cpp | 18 +++
 .../feature/ManagementTool/test_hawq_register.h | 18 +++
 .../test_hawq_register_partition.cpp| 18 +++
 .../test_hawq_register_rollback.cpp | 18 +++
 .../test_hawq_register_usage1.cpp   | 18 +++
 .../test_hawq_register_usage2_case1.cpp | 18 +++
 .../test_hawq_register_usage2_case2.cpp | 18 +++
 .../PreparedStatement/TestPreparedStatement.cpp | 18 +++
 src/test/feature/UDF/TestUDF.cpp| 18 +++
 src/test/feature/UDF/lib/function.c | 18 +++
 src/test/feature/ao/TestAoSnappy.cpp| 18 +++
 src/test/feature/catalog/test_alter_owner.cpp   | 18 +++
 src/test/feature/catalog/test_alter_table.cpp   | 18 +++
 src/test/feature/catalog/test_create_table.cpp  | 18 +++
 src/test/feature/catalog/test_guc.cpp   | 18 +++
 src/test/feature/catalog/test_type.cpp  | 18 +++
 src/test/feature/ddl/test_database.cpp  | 18 +++
 src/test/feature/lib/command.cpp| 18 +++
 src/test/feature/lib/command.h  | 18 +++
 src/test/feature/lib/data_gen.cpp   | 18 +++
 src/test/feature/lib/data_gen.h | 18 +++
 src/test/feature/lib/file_replace.cpp   | 18 +++
 src/test/feature/lib/file_replace.h | 18 +++
 src/test/feature/lib/hawq_config.cpp| 18 +++
 src/test/feature/lib/hawq_config.h  | 18 +++
 src/test/feature/lib/hawq_scp.cpp   | 18 +++
 src/test/feature/lib/hawq_scp.h | 18 +++
 src/test/feature/lib/hdfs_config.cpp| 18 +++
 src/test/feature/lib/hdfs_config.h  | 18 +++
 src/test/feature/lib/psql.cpp   | 18 +++
 src/test/feature/lib/psql.h | 18 +++
 src/test/feature/lib/sql_util.cpp   | 18 +++
 src/test/feature/lib/sql_util.h | 18 +++
 src/test/feature/lib/string_util.cpp| 18 +++
 src/test/feature/lib/string_util.h  | 18 +++
 src/test/feature/lib/xml_parser.cpp | 18 +++
 src/test/feature/lib/xml_parser.h   | 18 +++
 src/test/feature/lib/yarn_config.cpp| 18 +++
 src/test/feature/lib/yarn_config.h  | 18 +++
 src/test/feature/parallel-run-feature-test.sh   | 20 +++-
 src/test/feature/parquet/test_parquet.cpp   | 18 +++
 src/test/feature/partition/test_partition.cpp   | 18 +++
 src/test/feature/planner/test_subplan.cpp   | 18 +++
 src/test/feature/query/test_aggregate.cpp   | 18 +++
 .../query/test_create_type_composite.cpp| 18 +++
 src/test/feature/query/test_gp_dist_random.cpp  | 18 +++
 .../feature/query/test_information_schema.cpp   | 18 +++
 src/test/feature/query/test_insert.cpp  | 18 +++
 .../feature/query/test_nested_case_null.cpp | 18 +++
 src/test/feature/query/test_parser.cpp  | 18 +++
 src/test/feature/query/test_polymorphism.cpp| 18 +++
 src/test/feature/query/test_portal.cpp  | 18 +++
 src/test/feature/query/test_prepare.cpp

incubator-hawq git commit: HAWQ-1319. Add ASF header to java files and remove jar files in function suite

2017-02-10 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 63c856280 -> d7e6c196f


HAWQ-1319. Add ASF header to java files and remove jar files in function suite


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/d7e6c196
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/d7e6c196
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/d7e6c196

Branch: refs/heads/master
Commit: d7e6c196fcfb4badd50feab330ad85cf717cb171
Parents: 63c8562
Author: Ruilong Huo 
Authored: Fri Feb 10 13:34:21 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Feb 10 17:25:10 2017 +0800

--
 src/test/feature/UDF/TestUDF.cpp |  26 ++
 src/test/feature/UDF/lib/function.c  |  18 ++
 src/test/feature/UDF/sql/PLJavaAdd.jar   | Bin 654 -> 0 bytes
 src/test/feature/UDF/sql/PLJavaAdd.java  |  18 ++
 src/test/feature/UDF/sql/PLJavauAdd.jar  | Bin 657 -> 0 bytes
 src/test/feature/UDF/sql/PLJavauAdd.java |  18 ++
 6 files changed, 80 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d7e6c196/src/test/feature/UDF/TestUDF.cpp
--
diff --git a/src/test/feature/UDF/TestUDF.cpp b/src/test/feature/UDF/TestUDF.cpp
index 95509fb..f79843b 100755
--- a/src/test/feature/UDF/TestUDF.cpp
+++ b/src/test/feature/UDF/TestUDF.cpp
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include "gtest/gtest.h"
 
 #include "lib/command.h"
@@ -200,6 +218,10 @@ TEST_F(TestUDF, TestUDFPljava)
// run test if pljava language is enabled
if (util.getQueryResult("SELECT lanname FROM pg_language WHERE lanname 
= 'java'") == "java")
{
+   // prepare jar files
+   hawq::test::Command cmd("cd " + d_feature_test_root + 
"/UDF/sql/; javac PLJavaAdd.java; jar cf PLJavaAdd.jar PLJavaAdd.class");
+   cmd.run();
+
// copy jar files over hawq cluster
std::string query = "SELECT string_agg('-h ' || hostname, ' ' 
ORDER BY hostname) FROM gp_segment_configuration;";
std::string hosts = util.getQueryResult(query);
@@ -230,6 +252,10 @@ TEST_F(TestUDF, TestUDFPljavau)
// run test if pljavau language is enabled
if (util.getQueryResult("SELECT lanname FROM pg_language WHERE lanname 
= 'javau'") == "javau")
{
+   // prepare jar files
+   hawq::test::Command cmd("cd " + d_feature_test_root + 
"/UDF/sql/; javac PLJavauAdd.java; jar cf PLJavauAdd.jar PLJavauAdd.class");
+   cmd.run();
+
// copy jar files over hawq cluster
std::string query = "SELECT string_agg('-h ' || hostname, ' ' 
ORDER BY hostname) FROM gp_segment_configuration;";
std::string hosts = util.getQueryResult(query);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/d7e6c196/src/test/feature/UDF/lib/function.c
--
diff --git a/src/test/feature/UDF/lib/function.c 
b/src/test/feature/UDF/lib/function.c
index d9a842b..b2d4b81 100755
--- a/src/test/feature/UDF/lib/function.c
+++ b/src/test/feature/UDF/lib/function.c
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License f

incubator-hawq git commit: HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base

2017-01-16 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master ec7b4d9e9 -> 368dbc9e6


HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/368dbc9e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/368dbc9e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/368dbc9e

Branch: refs/heads/master
Commit: 368dbc9e64a2e62061ea47a0b9c7b15589dad457
Parents: ec7b4d9
Author: Richard Guo 
Authored: Tue Jan 3 17:12:59 2017 +0800
Committer: Ruilong Huo 
Committed: Tue Jan 17 10:57:02 2017 +0800

--
 contrib/hawq-docker/Makefile| 222 +++
 contrib/hawq-docker/README.md   |  97 
 .../centos6-docker/hawq-dev/Dockerfile  | 123 ++
 .../centos6-docker/hawq-test/Dockerfile |  40 
 .../centos6-docker/hawq-test/conf/core-site.xml |  24 ++
 .../centos6-docker/hawq-test/conf/hadoop-env.sh | 110 +
 .../centos6-docker/hawq-test/entrypoint.sh  |  34 +++
 .../centos6-docker/hawq-test/start-hdfs.sh  |  39 
 .../centos7-docker/hawq-dev/Dockerfile  |  75 +++
 .../centos7-docker/hawq-test/Dockerfile |  40 
 .../centos7-docker/hawq-test/conf/core-site.xml |  24 ++
 .../centos7-docker/hawq-test/conf/hadoop-env.sh | 110 +
 .../centos7-docker/hawq-test/entrypoint.sh  |  33 +++
 .../centos7-docker/hawq-test/start-hdfs.sh  |  39 
 14 files changed, 1010 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/368dbc9e/contrib/hawq-docker/Makefile
--
diff --git a/contrib/hawq-docker/Makefile b/contrib/hawq-docker/Makefile
new file mode 100644
index 000..120ebe2
--- /dev/null
+++ b/contrib/hawq-docker/Makefile
@@ -0,0 +1,222 @@
+#!/usr/bin/make all
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+THIS_MAKEFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST)))
+TOP_DIR := $(abspath $(dir ${THIS_MAKEFILE_PATH}))
+NDATANODES := 3
+CUR_DATANODE := 1
+OS_VERSION := centos7
+# Do not use underscore "_" in CLUSTER_ID
+CLUSTER_ID := $(OS_VERSION)
+# Mount this local directory to /data in the data container and share it with other containers
+LOCAL := 
+# networks used in docker
+NETWORK := $(CLUSTER_ID)_hawq_network
+
+all: 
+   @echo " Usage:"
+   @echo "To setup a build and test environment: make run"
+   @echo "To start all containers:   make start"
+   @echo "To stop all containers:make stop"
+   @echo "To remove hdfs containers: make clean"
+   @echo "To remove all containers:  make 
distclean"
+   @echo ""
+   @echo "To build images locally:   make build"
+   @echo "To pull latest images: make pull"
+
+build:
+   @make -f $(THIS_MAKEFILE_PATH) build-hawq-dev-$(OS_VERSION)
+   @make -f $(THIS_MAKEFILE_PATH) build-hawq-test-$(OS_VERSION)
+   @echo "Build Images Done!"
+
+build-hawq-dev-$(OS_VERSION): 
$(TOP_DIR)/$(OS_VERSION)-docker/hawq-dev/Dockerfile
+   @echo build hawq-dev:$(OS_VERSION) image
+   docker build -t hawq/hawq-dev:$(OS_VERSION) 
$(TOP_DIR)/$(OS_VERSION)-docker/hawq-dev/
+
+build-hawq-test-$(OS_VERSION): 
$(TOP_DIR)/$(OS_VERSION)-docker/hawq-test/Dockerfile
+   @echo build hawq-test:$(OS_VERSION) image
+   docker build -t hawq/hawq-test:$(OS_VERSION) 
$(TOP_DIR)/$(OS_VERSION)-docker/hawq-test/
+
+create-data-container:
+   @echo create ${CLUSTER_ID}-data container
+   @if [ ! -z "$(LOCAL)" -a ! -d "$(LOCAL)" ]; then \
+   echo "LOCAL must be set to a directory!"; \
+   exit 1; \
+   fi
+   @if [ -z "`docker ps -a --filter="name=${CLUSTER_ID}-data$$" | grep -v 
CONTAINER`" ]; then \
+   if [ -z "$(LOCAL)" ]; then \
+   docker create -v /data --name=${CLUSTE

[2/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

2017-01-13 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1cb29096/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
deleted file mode 100644
index c901ab1..000
--- a/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
+++ /dev/null
@@ -1,291 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=ALL
-
-# Null Appender
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Rolling File Appender - cap space usage at 5gb.
-#
-hadoop.log.maxfilesize=256MB
-hadoop.log.maxbackupindex=20
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
-
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.-MM-dd
-
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-# HDFS block state change log from block manager
-#
-# Uncomment the following to suppress normal block state change
-# messages from BlockManager in NameNode.
-#log4j.logger.BlockStateChange=WARN
-
-#
-#Security appender
-#
-hadoop.security.logger=INFO,NullAppender
-hadoop.security.log.maxfilesize=256MB
-hadoop.security.log.maxbackupindex=20
-log4j.category.SecurityLogger=${hadoop.security.logger}
-hadoop.security.log.file=SecurityAuth-${user.name}.audit
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
-log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
-
-#
-# Daily Rolling Security appender
-#
-log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
-log4j.a

[3/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

2017-01-13 Thread huor
Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

This reverts commit 440ce595a2298ac9be16973f0c7c4c358ddb2cd0.

Reason for revert: RAT check fails with unapproved license in some of the files added


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/1cb29096
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/1cb29096
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/1cb29096

Branch: refs/heads/master
Commit: 1cb29096ca89f4ed6f54ab795dfad518131969f2
Parents: 440ce59
Author: Ruilong Huo 
Authored: Fri Jan 13 19:38:13 2017 +0800
Committer: Ruilong Huo 
Committed: Sat Jan 14 11:40:11 2017 +0800

--
 contrib/hawq-docker/Makefile| 222 --
 contrib/hawq-docker/README.md   |  97 ---
 .../centos6-docker/hawq-dev/Dockerfile  | 123 
 .../centos6-docker/hawq-test/Dockerfile |  40 ---
 .../hawq-test/conf/capacity-scheduler.xml   | 134 -
 .../hawq-test/conf/configuration.xsl|  40 ---
 .../hawq-test/conf/container-executor.cfg   |   4 -
 .../centos6-docker/hawq-test/conf/core-site.xml |  24 --
 .../hawq-test/conf/hadoop-env.cmd   |  92 --
 .../centos6-docker/hawq-test/conf/hadoop-env.sh | 110 ---
 .../hawq-test/conf/hadoop-metrics.properties|  75 -
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 -
 .../hawq-test/conf/hadoop-policy.xml| 226 --
 .../centos6-docker/hawq-test/conf/hdfs-site.xml | 100 ---
 .../centos6-docker/hawq-test/conf/kms-acls.xml  | 135 -
 .../centos6-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 ---
 .../centos6-docker/hawq-test/conf/kms-site.xml  | 173 ---
 .../hawq-test/conf/log4j.properties | 291 ---
 .../hawq-test/conf/mapred-env.cmd   |  20 --
 .../centos6-docker/hawq-test/conf/mapred-env.sh |  27 --
 .../hawq-test/conf/mapred-queues.xml.template   |  92 --
 .../hawq-test/conf/mapred-site.xml.template |  21 --
 .../centos6-docker/hawq-test/conf/slaves|   1 -
 .../hawq-test/conf/ssl-client.xml.example   |  80 -
 .../hawq-test/conf/ssl-server.xml.example   |  78 -
 .../centos6-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos6-docker/hawq-test/entrypoint.sh  |  34 ---
 .../centos6-docker/hawq-test/start-hdfs.sh  |  39 ---
 .../centos7-docker/hawq-dev/Dockerfile  |  75 -
 .../centos7-docker/hawq-test/Dockerfile |  40 ---
 .../hawq-test/conf/capacity-scheduler.xml   | 134 -
 .../hawq-test/conf/configuration.xsl|  40 ---
 .../hawq-test/conf/container-executor.cfg   |   4 -
 .../centos7-docker/hawq-test/conf/core-site.xml |  24 --
 .../hawq-test/conf/hadoop-env.cmd   |  92 --
 .../centos7-docker/hawq-test/conf/hadoop-env.sh | 110 ---
 .../hawq-test/conf/hadoop-metrics.properties|  75 -
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 -
 .../hawq-test/conf/hadoop-policy.xml| 226 --
 .../centos7-docker/hawq-test/conf/hdfs-site.xml | 100 ---
 .../centos7-docker/hawq-test/conf/kms-acls.xml  | 135 -
 .../centos7-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 ---
 .../centos7-docker/hawq-test/conf/kms-site.xml  | 173 ---
 .../hawq-test/conf/log4j.properties | 291 ---
 .../hawq-test/conf/mapred-env.cmd   |  20 --
 .../centos7-docker/hawq-test/conf/mapred-env.sh |  27 --
 .../hawq-test/conf/mapred-queues.xml.template   |  92 --
 .../hawq-test/conf/mapred-site.xml.template |  21 --
 .../centos7-docker/hawq-test/conf/slaves|   1 -
 .../hawq-test/conf/ssl-client.xml.example   |  80 -
 .../hawq-test/conf/ssl-server.xml.example   |  78 -
 .../centos7-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos7-docker/hawq-test/entrypoint.sh  |  33 ---
 .../centos7-docker/hawq-test/start-hdfs.sh  |  39 ---
 56 files changed, 4630 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1cb29096/contrib/hawq-docker/Makefile
--
diff --git a/contrib/hawq-docker/Makefile b/contrib/hawq-docker/Makefile
deleted file mode 100644
index 120ebe2..000
--- a/contrib/hawq-docker/Makefile
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/make all
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-#

[1/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base." [Forced Update!]

2017-01-13 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 37d5c28c4 -> 1cb29096c (forced update)


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1cb29096/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
deleted file mode 100644
index a810ca4..000
--- a/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
+++ /dev/null
@@ -1,173 +0,0 @@
-
-
-
-
-  
-
-  
-hadoop.kms.key.provider.uri
-jceks://file@/${user.home}/kms.keystore
-
-  URI of the backing KeyProvider for the KMS.
-
-  
-
-  
-hadoop.security.keystore.JavaKeyStoreProvider.password
-none
-
-  If using the JavaKeyStoreProvider, the password for the keystore file.
-
-  
-
-  
-
-  
-hadoop.kms.cache.enable
-true
-
-  Whether the KMS will act as a cache for the backing KeyProvider.
-  When the cache is enabled, operations like getKeyVersion, getMetadata,
-  and getCurrentKey will sometimes return cached data without consulting
-  the backing KeyProvider. Cached values are flushed when keys are deleted
-  or modified.
-
-  
-
-  
-hadoop.kms.cache.timeout.ms
-60
-
-  Expiry time for the KMS key version and key metadata cache, in
-  milliseconds. This affects getKeyVersion and getMetadata.
-
-  
-
-  
-hadoop.kms.current.key.cache.timeout.ms
-3
-
-  Expiry time for the KMS current key cache, in milliseconds. This
-  affects getCurrentKey operations.
-
-  
-
-  
-
-  
-hadoop.kms.audit.aggregation.window.ms
-1
-
-  Duplicate audit log events within the aggregation window (specified in
-  ms) are quashed to reduce log traffic. A single message for aggregated
-  events is printed at the end of the window, along with a count of the
-  number of aggregated events.
-
-  
-
-  
-
-  
-hadoop.kms.authentication.type
-simple
-
-  Authentication type for the KMS. Can be either "simple"
-  or "kerberos".
-
-  
-
-  
-hadoop.kms.authentication.kerberos.keytab
-${user.home}/kms.keytab
-
-  Path to the keytab with credentials for the configured Kerberos 
principal.
-
-  
-
-  
-hadoop.kms.authentication.kerberos.principal
-HTTP/localhost
-
-  The Kerberos principal to use for the HTTP endpoint.
-  The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO 
specification.
-
-  
-
-  
-hadoop.kms.authentication.kerberos.name.rules
-DEFAULT
-
-  Rules used to resolve Kerberos principal names.
-
-  
-
-  
-
-  
-hadoop.kms.authentication.signer.secret.provider
-random
-
-  Indicates how the secret to sign the authentication cookies will be
-  stored. Options are 'random' (default), 'string' and 'zookeeper'.
-  If using a setup with multiple KMS instances, 'zookeeper' should be used.
-
-  
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.path
-/hadoop-kms/hadoop-auth-signature-secret
-
-  The Zookeeper ZNode path where the KMS instances will store and retrieve
-  the secret from.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string
-#HOSTNAME#:#PORT#,...
-
-  The Zookeeper connection string, a list of hostnames and port comma
-  separated.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type
-kerberos
-
-  The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab
-/etc/hadoop/conf/kms.keytab
-
-  The absolute path for the Kerberos keytab with the credentials to
-  connect to Zookeeper.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal
-kms/#HOSTNAME#
-
-  The Kerberos service principal used to connect to Zookeeper.
-
-  
-
-

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/1cb29096/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
deleted file mode 100644
index c901ab1..000
--- a/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
+++ /dev/null
@@ -1,291 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache Lic

[1/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

2017-01-13 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 440ce595a -> 37d5c28c4


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/37d5c28c/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
deleted file mode 100644
index a810ca4..000
--- a/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
+++ /dev/null
@@ -1,173 +0,0 @@
-
-
-
-
-  
-
-  
-hadoop.kms.key.provider.uri
-jceks://file@/${user.home}/kms.keystore
-
-  URI of the backing KeyProvider for the KMS.
-
-  
-
-  
-hadoop.security.keystore.JavaKeyStoreProvider.password
-none
-
-  If using the JavaKeyStoreProvider, the password for the keystore file.
-
-  
-
-  
-
-  
-hadoop.kms.cache.enable
-true
-
-  Whether the KMS will act as a cache for the backing KeyProvider.
-  When the cache is enabled, operations like getKeyVersion, getMetadata,
-  and getCurrentKey will sometimes return cached data without consulting
-  the backing KeyProvider. Cached values are flushed when keys are deleted
-  or modified.
-
-  
-
-  
-hadoop.kms.cache.timeout.ms
-60
-
-  Expiry time for the KMS key version and key metadata cache, in
-  milliseconds. This affects getKeyVersion and getMetadata.
-
-  
-
-  
-hadoop.kms.current.key.cache.timeout.ms
-3
-
-  Expiry time for the KMS current key cache, in milliseconds. This
-  affects getCurrentKey operations.
-
-  
-
-  
-
-  
-hadoop.kms.audit.aggregation.window.ms
-1
-
-  Duplicate audit log events within the aggregation window (specified in
-  ms) are quashed to reduce log traffic. A single message for aggregated
-  events is printed at the end of the window, along with a count of the
-  number of aggregated events.
-
-  
-
-  
-
-  
-hadoop.kms.authentication.type
-simple
-
-  Authentication type for the KMS. Can be either "simple"
-  or "kerberos".
-
-  
-
-  
-hadoop.kms.authentication.kerberos.keytab
-${user.home}/kms.keytab
-
-  Path to the keytab with credentials for the configured Kerberos 
principal.
-
-  
-
-  
-hadoop.kms.authentication.kerberos.principal
-HTTP/localhost
-
-  The Kerberos principal to use for the HTTP endpoint.
-  The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO 
specification.
-
-  
-
-  
-hadoop.kms.authentication.kerberos.name.rules
-DEFAULT
-
-  Rules used to resolve Kerberos principal names.
-
-  
-
-  
-
-  
-hadoop.kms.authentication.signer.secret.provider
-random
-
-  Indicates how the secret to sign the authentication cookies will be
-  stored. Options are 'random' (default), 'string' and 'zookeeper'.
-  If using a setup with multiple KMS instances, 'zookeeper' should be used.
-
-  
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.path
-/hadoop-kms/hadoop-auth-signature-secret
-
-  The Zookeeper ZNode path where the KMS instances will store and retrieve
-  the secret from.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string
-#HOSTNAME#:#PORT#,...
-
-  The Zookeeper connection string, a list of hostnames and port comma
-  separated.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type
-kerberos
-
-  The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab
-/etc/hadoop/conf/kms.keytab
-
-  The absolute path for the Kerberos keytab with the credentials to
-  connect to Zookeeper.
-
-  
-
-  
-
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal
-kms/#HOSTNAME#
-
-  The Kerberos service principal used to connect to Zookeeper.
-
-  
-
-

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/37d5c28c/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
deleted file mode 100644
index c901ab1..000
--- a/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
+++ /dev/null
@@ -1,291 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.

[2/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

2017-01-13 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/37d5c28c/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
deleted file mode 100644
index c901ab1..000
--- a/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
+++ /dev/null
@@ -1,291 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hadoop.root.logger=INFO,console
-hadoop.log.dir=.
-hadoop.log.file=hadoop.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hadoop.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=ALL
-
-# Null Appender
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# Rolling File Appender - cap space usage at 5gb.
-#
-hadoop.log.maxfilesize=256MB
-hadoop.log.maxbackupindex=20
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
-
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-# Rollover at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this 
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
-
-#
-# TaskLog Appender
-#
-
-#Default values
-hadoop.tasklog.taskid=null
-hadoop.tasklog.iscleanup=false
-hadoop.tasklog.noKeepSplits=4
-hadoop.tasklog.totalLogFileSize=100
-hadoop.tasklog.purgeLogSplits=true
-hadoop.tasklog.logsRetainHours=12
-
-log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
-log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
-log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
-log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
-
-log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
-log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
-# HDFS block state change log from block manager
-#
-# Uncomment the following to suppress normal block state change
-# messages from BlockManager in NameNode.
-#log4j.logger.BlockStateChange=WARN
-
-#
-#Security appender
-#
-hadoop.security.logger=INFO,NullAppender
-hadoop.security.log.maxfilesize=256MB
-hadoop.security.log.maxbackupindex=20
-log4j.category.SecurityLogger=${hadoop.security.logger}
-hadoop.security.log.file=SecurityAuth-${user.name}.audit
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
-log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
-
-#
-# Daily Rolling Security appender
-#
-log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
-log4j.a

[3/3] incubator-hawq git commit: Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

2017-01-13 Thread huor
Revert "HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base."

This reverts commit 440ce595a2298ac9be16973f0c7c4c358ddb2cd0.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/37d5c28c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/37d5c28c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/37d5c28c

Branch: refs/heads/master
Commit: 37d5c28c44fde95987cb9fc96318e3b8fe78f04c
Parents: 440ce59
Author: Ruilong Huo 
Authored: Fri Jan 13 19:38:13 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Jan 13 19:38:13 2017 +0800

--
 contrib/hawq-docker/Makefile| 222 --
 contrib/hawq-docker/README.md   |  97 ---
 .../centos6-docker/hawq-dev/Dockerfile  | 123 
 .../centos6-docker/hawq-test/Dockerfile |  40 ---
 .../hawq-test/conf/capacity-scheduler.xml   | 134 -
 .../hawq-test/conf/configuration.xsl|  40 ---
 .../hawq-test/conf/container-executor.cfg   |   4 -
 .../centos6-docker/hawq-test/conf/core-site.xml |  24 --
 .../hawq-test/conf/hadoop-env.cmd   |  92 --
 .../centos6-docker/hawq-test/conf/hadoop-env.sh | 110 ---
 .../hawq-test/conf/hadoop-metrics.properties|  75 -
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 -
 .../hawq-test/conf/hadoop-policy.xml| 226 --
 .../centos6-docker/hawq-test/conf/hdfs-site.xml | 100 ---
 .../centos6-docker/hawq-test/conf/kms-acls.xml  | 135 -
 .../centos6-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 ---
 .../centos6-docker/hawq-test/conf/kms-site.xml  | 173 ---
 .../hawq-test/conf/log4j.properties | 291 ---
 .../hawq-test/conf/mapred-env.cmd   |  20 --
 .../centos6-docker/hawq-test/conf/mapred-env.sh |  27 --
 .../hawq-test/conf/mapred-queues.xml.template   |  92 --
 .../hawq-test/conf/mapred-site.xml.template |  21 --
 .../centos6-docker/hawq-test/conf/slaves|   1 -
 .../hawq-test/conf/ssl-client.xml.example   |  80 -
 .../hawq-test/conf/ssl-server.xml.example   |  78 -
 .../centos6-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos6-docker/hawq-test/entrypoint.sh  |  34 ---
 .../centos6-docker/hawq-test/start-hdfs.sh  |  39 ---
 .../centos7-docker/hawq-dev/Dockerfile  |  75 -
 .../centos7-docker/hawq-test/Dockerfile |  40 ---
 .../hawq-test/conf/capacity-scheduler.xml   | 134 -
 .../hawq-test/conf/configuration.xsl|  40 ---
 .../hawq-test/conf/container-executor.cfg   |   4 -
 .../centos7-docker/hawq-test/conf/core-site.xml |  24 --
 .../hawq-test/conf/hadoop-env.cmd   |  92 --
 .../centos7-docker/hawq-test/conf/hadoop-env.sh | 110 ---
 .../hawq-test/conf/hadoop-metrics.properties|  75 -
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 -
 .../hawq-test/conf/hadoop-policy.xml| 226 --
 .../centos7-docker/hawq-test/conf/hdfs-site.xml | 100 ---
 .../centos7-docker/hawq-test/conf/kms-acls.xml  | 135 -
 .../centos7-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 ---
 .../centos7-docker/hawq-test/conf/kms-site.xml  | 173 ---
 .../hawq-test/conf/log4j.properties | 291 ---
 .../hawq-test/conf/mapred-env.cmd   |  20 --
 .../centos7-docker/hawq-test/conf/mapred-env.sh |  27 --
 .../hawq-test/conf/mapred-queues.xml.template   |  92 --
 .../hawq-test/conf/mapred-site.xml.template |  21 --
 .../centos7-docker/hawq-test/conf/slaves|   1 -
 .../hawq-test/conf/ssl-client.xml.example   |  80 -
 .../hawq-test/conf/ssl-server.xml.example   |  78 -
 .../centos7-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos7-docker/hawq-test/entrypoint.sh  |  33 ---
 .../centos7-docker/hawq-test/start-hdfs.sh  |  39 ---
 56 files changed, 4630 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/37d5c28c/contrib/hawq-docker/Makefile
--
diff --git a/contrib/hawq-docker/Makefile b/contrib/hawq-docker/Makefile
deleted file mode 100644
index 120ebe2..000
--- a/contrib/hawq-docker/Makefile
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/make all
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this 

[1/3] incubator-hawq git commit: HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base.

2017-01-13 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c8be9f29e -> 440ce595a


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/440ce595/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
new file mode 100644
index 000..a810ca4
--- /dev/null
+++ b/contrib/hawq-docker/centos7-docker/hawq-test/conf/kms-site.xml
@@ -0,0 +1,173 @@
+
+
+
+
+  
+
+  
+hadoop.kms.key.provider.uri
+jceks://file@/${user.home}/kms.keystore
+
+  URI of the backing KeyProvider for the KMS.
+
+  
+
+  
+hadoop.security.keystore.JavaKeyStoreProvider.password
+none
+
+  If using the JavaKeyStoreProvider, the password for the keystore file.
+
+  
+
+  
+
+  
+hadoop.kms.cache.enable
+true
+
+  Whether the KMS will act as a cache for the backing KeyProvider.
+  When the cache is enabled, operations like getKeyVersion, getMetadata,
+  and getCurrentKey will sometimes return cached data without consulting
+  the backing KeyProvider. Cached values are flushed when keys are deleted
+  or modified.
+
+  
+
+  
+hadoop.kms.cache.timeout.ms
+60
+
+  Expiry time for the KMS key version and key metadata cache, in
+  milliseconds. This affects getKeyVersion and getMetadata.
+
+  
+
+  
+hadoop.kms.current.key.cache.timeout.ms
+3
+
+  Expiry time for the KMS current key cache, in milliseconds. This
+  affects getCurrentKey operations.
+
+  
+
+  
+
+  
+hadoop.kms.audit.aggregation.window.ms
+1
+
+  Duplicate audit log events within the aggregation window (specified in
+  ms) are quashed to reduce log traffic. A single message for aggregated
+  events is printed at the end of the window, along with a count of the
+  number of aggregated events.
+
+  
+
+  
+
+  
+hadoop.kms.authentication.type
+simple
+
+  Authentication type for the KMS. Can be either "simple"
+  or "kerberos".
+
+  
+
+  
+hadoop.kms.authentication.kerberos.keytab
+${user.home}/kms.keytab
+
+  Path to the keytab with credentials for the configured Kerberos 
principal.
+
+  
+
+  
+hadoop.kms.authentication.kerberos.principal
+HTTP/localhost
+
+  The Kerberos principal to use for the HTTP endpoint.
+  The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO 
specification.
+
+  
+
+  
+hadoop.kms.authentication.kerberos.name.rules
+DEFAULT
+
+  Rules used to resolve Kerberos principal names.
+
+  
+
+  
+
+  
+hadoop.kms.authentication.signer.secret.provider
+random
+
+  Indicates how the secret to sign the authentication cookies will be
+  stored. Options are 'random' (default), 'string' and 'zookeeper'.
+  If using a setup with multiple KMS instances, 'zookeeper' should be used.
+
+  
+
+  
+
+  
+
hadoop.kms.authentication.signer.secret.provider.zookeeper.path
+/hadoop-kms/hadoop-auth-signature-secret
+
+  The Zookeeper ZNode path where the KMS instances will store and retrieve
+  the secret from.
+
+  
+
+  
+
hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string
+#HOSTNAME#:#PORT#,...
+
+  The Zookeeper connection string, a list of hostnames and port comma
+  separated.
+
+  
+
+  
+
hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type
+kerberos
+
+  The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
+
+  
+
+  
+
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab
+/etc/hadoop/conf/kms.keytab
+
+  The absolute path for the Kerberos keytab with the credentials to
+  connect to Zookeeper.
+
+  
+
+  
+
hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal
+kms/#HOSTNAME#
+
+  The Kerberos service principal used to connect to Zookeeper.
+
+  
+
+

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/440ce595/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
new file mode 100644
index 000..c901ab1
--- /dev/null
+++ b/contrib/hawq-docker/centos7-docker/hawq-test/conf/log4j.properties
@@ -0,0 +1,291 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+

[3/3] incubator-hawq git commit: HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base.

2017-01-13 Thread huor
HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/440ce595
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/440ce595
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/440ce595

Branch: refs/heads/master
Commit: 440ce595a2298ac9be16973f0c7c4c358ddb2cd0
Parents: c8be9f2
Author: Richard Guo 
Authored: Tue Jan 3 17:12:59 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Jan 13 19:16:28 2017 +0800

--
 contrib/hawq-docker/Makefile| 222 ++
 contrib/hawq-docker/README.md   |  97 +++
 .../centos6-docker/hawq-dev/Dockerfile  | 123 
 .../centos6-docker/hawq-test/Dockerfile |  40 +++
 .../hawq-test/conf/capacity-scheduler.xml   | 134 +
 .../hawq-test/conf/configuration.xsl|  40 +++
 .../hawq-test/conf/container-executor.cfg   |   4 +
 .../centos6-docker/hawq-test/conf/core-site.xml |  24 ++
 .../hawq-test/conf/hadoop-env.cmd   |  92 ++
 .../centos6-docker/hawq-test/conf/hadoop-env.sh | 110 +++
 .../hawq-test/conf/hadoop-metrics.properties|  75 +
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 +
 .../hawq-test/conf/hadoop-policy.xml| 226 ++
 .../centos6-docker/hawq-test/conf/hdfs-site.xml | 100 +++
 .../centos6-docker/hawq-test/conf/kms-acls.xml  | 135 +
 .../centos6-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 +++
 .../centos6-docker/hawq-test/conf/kms-site.xml  | 173 +++
 .../hawq-test/conf/log4j.properties | 291 +++
 .../hawq-test/conf/mapred-env.cmd   |  20 ++
 .../centos6-docker/hawq-test/conf/mapred-env.sh |  27 ++
 .../hawq-test/conf/mapred-queues.xml.template   |  92 ++
 .../hawq-test/conf/mapred-site.xml.template |  21 ++
 .../centos6-docker/hawq-test/conf/slaves|   1 +
 .../hawq-test/conf/ssl-client.xml.example   |  80 +
 .../hawq-test/conf/ssl-server.xml.example   |  78 +
 .../centos6-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos6-docker/hawq-test/entrypoint.sh  |  34 +++
 .../centos6-docker/hawq-test/start-hdfs.sh  |  39 +++
 .../centos7-docker/hawq-dev/Dockerfile  |  75 +
 .../centos7-docker/hawq-test/Dockerfile |  40 +++
 .../hawq-test/conf/capacity-scheduler.xml   | 134 +
 .../hawq-test/conf/configuration.xsl|  40 +++
 .../hawq-test/conf/container-executor.cfg   |   4 +
 .../centos7-docker/hawq-test/conf/core-site.xml |  24 ++
 .../hawq-test/conf/hadoop-env.cmd   |  92 ++
 .../centos7-docker/hawq-test/conf/hadoop-env.sh | 110 +++
 .../hawq-test/conf/hadoop-metrics.properties|  75 +
 .../hawq-test/conf/hadoop-metrics2.properties   |  68 +
 .../hawq-test/conf/hadoop-policy.xml| 226 ++
 .../centos7-docker/hawq-test/conf/hdfs-site.xml | 100 +++
 .../centos7-docker/hawq-test/conf/kms-acls.xml  | 135 +
 .../centos7-docker/hawq-test/conf/kms-env.sh|  55 
 .../hawq-test/conf/kms-log4j.properties |  38 +++
 .../centos7-docker/hawq-test/conf/kms-site.xml  | 173 +++
 .../hawq-test/conf/log4j.properties | 291 +++
 .../hawq-test/conf/mapred-env.cmd   |  20 ++
 .../centos7-docker/hawq-test/conf/mapred-env.sh |  27 ++
 .../hawq-test/conf/mapred-queues.xml.template   |  92 ++
 .../hawq-test/conf/mapred-site.xml.template |  21 ++
 .../centos7-docker/hawq-test/conf/slaves|   1 +
 .../hawq-test/conf/ssl-client.xml.example   |  80 +
 .../hawq-test/conf/ssl-server.xml.example   |  78 +
 .../centos7-docker/hawq-test/conf/yarn-env.cmd  |  60 
 .../centos7-docker/hawq-test/entrypoint.sh  |  33 +++
 .../centos7-docker/hawq-test/start-hdfs.sh  |  39 +++
 56 files changed, 4630 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/440ce595/contrib/hawq-docker/Makefile
--
diff --git a/contrib/hawq-docker/Makefile b/contrib/hawq-docker/Makefile
new file mode 100644
index 000..120ebe2
--- /dev/null
+++ b/contrib/hawq-docker/Makefile
@@ -0,0 +1,222 @@
+#!/usr/bin/make all
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the

[2/3] incubator-hawq git commit: HAWQ-1248. Merge Dockerfiles for HAWQ Dev into HAWQ code base.

2017-01-13 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/440ce595/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
--
diff --git a/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties 
b/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
new file mode 100644
index 000..c901ab1
--- /dev/null
+++ b/contrib/hawq-docker/centos6-docker/hawq-test/conf/log4j.properties
@@ -0,0 +1,291 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=ALL
+
+# Null Appender
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
+
+#
+# Rolling File Appender - cap space usage at 5gb.
+#
+hadoop.log.maxfilesize=256MB
+hadoop.log.maxbackupindex=20
+log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
+log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}
+
+log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# HDFS block state change log from block manager
+#
+# Uncomment the following to suppress normal block state change
+# messages from BlockManager in NameNode.
+#log4j.logger.BlockStateChange=WARN
+
+#
+#Security appender
+#
+hadoop.security.logger=INFO,NullAppender
+hadoop.security.log.maxfilesize=256MB
+hadoop.security.log.maxbackupindex=20
+log4j.category.SecurityLogger=${hadoop.security.logger}
+hadoop.security.log.file=SecurityAuth-${user.name}.audit
+log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
+log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
+log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
+
+#
+# Daily Rolling Security appender
+#
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appen

incubator-hawq git commit: HAWQ-1268. Update pom.xml to reflect the correct version for apache hawq 2.1.0.0-incubating

2017-01-12 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master a8177153b -> c8be9f29e


HAWQ-1268. Update pom.xml to reflect the correct version for apache hawq 
2.1.0.0-incubating


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/c8be9f29
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/c8be9f29
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/c8be9f29

Branch: refs/heads/master
Commit: c8be9f29e2bd35508ad529a64fb1884daec3155e
Parents: a817715
Author: Ruilong Huo 
Authored: Fri Jan 13 10:04:13 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Jan 13 10:37:04 2017 +0800

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c8be9f29/pom.xml
--
diff --git a/pom.xml b/pom.xml
index f128681..bdd2e75 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@
  
   org.apache.hawq
   hawq
-  2.0
+  2.1
   pom
 
 



incubator-hawq git commit: HAWQ-1267. Update NOTICE file to reflect the right year for copyright

2017-01-12 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master bf4742cb6 -> a8177153b


HAWQ-1267. Update NOTICE file to reflect the right year for copyright


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/a8177153
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/a8177153
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/a8177153

Branch: refs/heads/master
Commit: a8177153b25123759d15411146c094dc350e4bab
Parents: bf4742c
Author: Ruilong Huo 
Authored: Fri Jan 13 10:00:41 2017 +0800
Committer: Ruilong Huo 
Committed: Fri Jan 13 10:00:41 2017 +0800

--
 NOTICE | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a8177153/NOTICE
--
diff --git a/NOTICE b/NOTICE
index 6589c8f..1b1b2e7 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,5 +1,5 @@
 Apache HAWQ (incubating) 
-Copyright 2016 The Apache Software Foundation.
+Copyright 2017 The Apache Software Foundation.
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).



incubator-hawq git commit: HAWQ-1092. lc_collate and lc_ctype do not work after setting through hawq init

2016-10-10 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 5f3db03a7 -> 85192f8e4


HAWQ-1092. lc_collate and lc_ctype do not work after setting through hawq init


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/85192f8e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/85192f8e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/85192f8e

Branch: refs/heads/master
Commit: 85192f8e4742fe206b1c5f9f86b0c85d5e086cf6
Parents: 5f3db03
Author: Paul Guo 
Authored: Sun Oct 9 17:47:32 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Oct 11 11:09:30 2016 +0800

--
 src/backend/access/transam/xlog.c|  2 ++
 src/test/feature/README.md   |  2 +-
 src/test/feature/catalog/ans/guc.ans | 12 
 src/test/feature/catalog/sql/guc.sql |  2 ++
 4 files changed, 17 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85192f8e/src/backend/access/transam/xlog.c
--
diff --git a/src/backend/access/transam/xlog.c 
b/src/backend/access/transam/xlog.c
index a0b3c82..eeef04e 100644
--- a/src/backend/access/transam/xlog.c
+++ b/src/backend/access/transam/xlog.c
@@ -5299,6 +5299,8 @@ XLOGShmemInit(void)
 void
 XLogStartupInit(void)
 {
+   if (!IsBootstrapProcessingMode())
+   ReadControlFile();
 }
 
 /*

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85192f8e/src/test/feature/README.md
--
diff --git a/src/test/feature/README.md b/src/test/feature/README.md
index 229c7c4..2e27f97 100644
--- a/src/test/feature/README.md
+++ b/src/test/feature/README.md
@@ -12,7 +12,7 @@ Also, you could enter the `$HAWQ_HOME/src/test/feature` 
folder to `make` and `ma
 Before building the code of feature tests part, just make sure your compiler 
supports C++11 and you have apache HAWQ successfully compiled.
 
 # Run
-1. Make sure HAWQ is running correctly. If not, `init` or `start` HAWQ at 
first.
+1. Make sure HAWQ is running correctly. If not, `init` or `start` HAWQ at 
first. Note please don't set locale related arguments for hawq init.
 2. Load environment configuration by running `source 
$INSTALL_PREFIX/greenplum_path.sh`.
 3. Load hdfs configuration. For example, `export 
HADOOP_HOME=/Users/wuhong/hadoop-2.7.2 && export 
PATH=${PATH}:${HADOOP_HOME}/bin`. Since some test cases need `hdfs` and 
`hadoop` command, just ensure these commands work before running. Otherwise you 
will get failure.
 4. Run `./feature-test`, you could use `--gtest_filter` option to filter test 
cases(both positive and negative patterns are supported). Please see more 
options by running `./feature-test --help`.

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85192f8e/src/test/feature/catalog/ans/guc.ans
--
diff --git a/src/test/feature/catalog/ans/guc.ans 
b/src/test/feature/catalog/ans/guc.ans
index 7c4595c..117d365 100644
--- a/src/test/feature/catalog/ans/guc.ans
+++ b/src/test/feature/catalog/ans/guc.ans
@@ -24,3 +24,15 @@ SELECT f1 FROM DATE_TBL;
  09-04-1957
 (1 row)
 
+--- Partially test JIRA HAWQ-1092 lc_collate and lc_ctype do not work after 
setting through hawq init
+SELECT name, setting from pg_settings where name like 'lc%'
+    name     |  setting   
+-------------+------------
+ lc_collate  | en_US.utf8
+ lc_ctype| en_US.utf8
+ lc_messages | en_US.utf8
+ lc_monetary | en_US.utf8
+ lc_numeric  | en_US.utf8
+ lc_time | en_US.utf8
+(6 rows)
+

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/85192f8e/src/test/feature/catalog/sql/guc.sql
--
diff --git a/src/test/feature/catalog/sql/guc.sql 
b/src/test/feature/catalog/sql/guc.sql
index cf16538..f22fb8c 100644
--- a/src/test/feature/catalog/sql/guc.sql
+++ b/src/test/feature/catalog/sql/guc.sql
@@ -9,3 +9,5 @@ SELECT f1 FROM DATE_TBL;
 SET DATESTYLE TO 'POSTGRES, DMY';
 SELECT f1 FROM DATE_TBL;
 
+--- Partially test JIRA HAWQ-1092 lc_collate and lc_ctype do not work after 
setting through hawq init
+SELECT name, setting from pg_settings where name like 'lc%'



incubator-hawq git commit: HAWQ-1051. Failing in reverse DNS lookup causes resource manager core dump

2016-10-08 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 8928e88ea -> 4b3fdb262


HAWQ-1051. Failing in reverse DNS lookup causes resource manager core dump

Signed-off-by: jiny2 


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/4b3fdb26
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/4b3fdb26
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/4b3fdb26

Branch: refs/heads/master
Commit: 4b3fdb262832f056cb39e725331938721adb39d7
Parents: 8928e88
Author: stanlyxiang 
Authored: Wed Sep 21 23:57:27 2016 +0800
Committer: Ruilong Huo 
Committed: Sun Oct 9 11:35:45 2016 +0800

--
 src/backend/resourcemanager/requesthandler.c | 6 +-
 1 file changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4b3fdb26/src/backend/resourcemanager/requesthandler.c
--
diff --git a/src/backend/resourcemanager/requesthandler.c 
b/src/backend/resourcemanager/requesthandler.c
index 9946322..976156b 100644
--- a/src/backend/resourcemanager/requesthandler.c
+++ b/src/backend/resourcemanager/requesthandler.c
@@ -627,7 +627,11 @@ bool handleRMSEGRequestIMAlive(void **arg)
fts_client_ip_len = strlen(fts_client_ip);
inet_aton(fts_client_ip, &fts_client_addr);
fts_client_host = gethostbyaddr(&fts_client_addr, 4, AF_INET);
-   Assert(fts_client_host != NULL);
+   if (fts_client_host == NULL)
+   {
+   elog(WARNING, "Failed to reverse DNS lookup for ip %s.", 
fts_client_ip);
+   return true;
+   }
 
/* Get the received machine id instance start address. */
SegInfo fts_client_seginfo = 
(SegInfo)(SMBUFF_CONTENT(&(conntrack->MessageBuff)) +
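
The fix above replaces an Assert on the return value of gethostbyaddr() with an
explicit NULL check and a WARNING, because a reverse DNS lookup can legitimately
fail and must not crash the resource manager. A minimal, self-contained sketch of
the same defensive pattern follows; it is an illustration only, and
resolve_or_warn() is a hypothetical helper, not HAWQ code.

// Sketch of the defensive pattern used in the fix above (illustration only).
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <netdb.h>
#include <cstdio>

bool resolve_or_warn(const char *ip)
{
    struct in_addr addr;
    if (inet_aton(ip, &addr) == 0) {
        std::fprintf(stderr, "WARNING: invalid IPv4 address %s\n", ip);
        return false;
    }
    // gethostbyaddr() returns NULL when the reverse DNS lookup fails;
    // treat that as a recoverable condition instead of asserting on it.
    struct hostent *host = gethostbyaddr(&addr, sizeof(addr), AF_INET);
    if (host == NULL) {
        std::fprintf(stderr, "WARNING: failed to reverse DNS lookup for ip %s\n", ip);
        return false;
    }
    std::printf("resolved %s to %s\n", ip, host->h_name);
    return true;
}

int main()
{
    resolve_or_warn("127.0.0.1");   // normally resolves to localhost
    resolve_or_warn("192.0.2.1");   // TEST-NET-1 address, usually has no PTR record
    return 0;
}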



incubator-hawq git commit: HAWQ-1044. Remove duplicated testcases for hawq register usage2

2016-09-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 98f7e8e93 -> e6c7fda07


HAWQ-1044. Remove duplicated testcases for hawq register usage2


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/e6c7fda0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/e6c7fda0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/e6c7fda0

Branch: refs/heads/master
Commit: e6c7fda079828c2b2cdcc56a4fac249933527969
Parents: 98f7e8e
Author: Chunling Wang 
Authored: Mon Sep 19 17:42:36 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Sep 20 18:24:43 2016 +0800

--
 .../ManagementTool/test_hawq_register.cpp   | 104 ---
 1 file changed, 104 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/e6c7fda0/src/test/feature/ManagementTool/test_hawq_register.cpp
--
diff --git a/src/test/feature/ManagementTool/test_hawq_register.cpp 
b/src/test/feature/ManagementTool/test_hawq_register.cpp
index 791b522..123f73c 100644
--- a/src/test/feature/ManagementTool/test_hawq_register.cpp
+++ b/src/test/feature/ManagementTool/test_hawq_register.cpp
@@ -135,27 +135,6 @@ TEST_F(TestHawqRegister, TestFiles) {
util.execute("drop table hawqregister;");
 }
 
-TEST_F(TestHawqRegister, TestUsage1HashDistributedTable) {
-   SQLUtility util;
-   string rootPath(util.getTestRootPath());
-   string relativePath("/ManagementTool/test_hawq_register_hawq.paq");
-   string filePath = rootPath + relativePath;
-
-auto cmd = hawq::test::stringFormat("hadoop fs -put -f %s 
%s/hawq_register_hawq.paq", filePath.c_str(), getHdfsLocation().c_str());
-   EXPECT_EQ(0, Command::getCommandStatus(cmd));
-
-   util.execute("create table hawqregister(i int) with (appendonly=true, 
orientation=parquet) distributed by (i);");
-   util.query("select * from hawqregister;", 0);
-
-cmd = hawq::test::stringFormat("hawq register -d %s -f 
%s/hawq_register_hawq.paq hawqregister", HAWQ_DB, getHdfsLocation().c_str());
-   EXPECT_EQ(1, Command::getCommandStatus(cmd));
-   util.query("select * from hawqregister;", 0);
-
-cmd = hawq::test::stringFormat("hadoop fs -rm -r %s 
%s/hawq_register_hawq.paq", filePath.c_str(), getHdfsLocation().c_str());
-   EXPECT_EQ(0, Command::getCommandStatus(cmd));
-   util.execute("drop table hawqregister;");
-}
-
 TEST_F(TestHawqRegister, TestUsage1NotParquetFile) {
SQLUtility util;
string rootPath(util.getTestRootPath());
@@ -232,89 +211,6 @@ TEST_F(TestHawqRegister, TestUsage1NotHDFSPath) {
util.execute("drop table hawqregister;");
 }
 
-TEST_F(TestHawqRegister, TestUsage2ParquetRandomly) {
-  SQLUtility util;
-  util.execute("drop table if exists t;");
-  util.execute("drop table if exists nt;");
-  util.execute("create table t(i int) with (appendonly=true, 
orientation=parquet) distributed randomly;");
-  util.execute("insert into t values(1), (2), (3);");
-  util.query("select * from t;", 3);
-  EXPECT_EQ(0, Command::getCommandStatus("hawq extract -d " + (string) HAWQ_DB 
+ " -o t.yml testhawqregister_testusage2parquetrandomly.t"));
-  EXPECT_EQ(0, Command::getCommandStatus("hawq register -d " + (string) 
HAWQ_DB + " -c t.yml testhawqregister_testusage2parquetrandomly.nt"));
-  util.query("select * from nt;", 3);
-  EXPECT_EQ(0, Command::getCommandStatus("rm -rf t.yml"));
-  util.execute("drop table t;");
-  util.execute("drop table nt;");
-}
-
-TEST_F(TestHawqRegister, TestUsage2ParquetHash1) {
-  SQLUtility util;
-  util.execute("drop table if exists t4;");
-  util.execute("create table t4(i int) with (appendonly=true, 
orientation=parquet) distributed by (i);");
-  util.execute("insert into t4 values(1), (2), (3);");
-  EXPECT_EQ(0, Command::getCommandStatus("hawq extract -d " + (string) HAWQ_DB 
+ " -o t4.yml testhawqregister_testusage2parquethash1.t4"));
-  EXPECT_EQ(0, Command::getCommandStatus("hawq register -d " + (string) 
HAWQ_DB + " -c t4.yml testhawqregister_testusage2parquethash1.nt4"));
-  util.query("select * from nt4;", 3);
-  EXPECT_EQ(0, Command::getCommandStatus("rm -rf t4.yml"));
-  util.execute("drop table t4;");
-  util.execute("drop table nt4;");
-}
-
-
-TEST_F(TestHawqRegister, TestUsage2ParquetHash2) {
-  SQLUtility util;
-  util.execute("drop table if exists t5;");
-  util.execute("create table t5(i int, j varchar, k text) with 
(appendonly=true, orientation=parquet) distributed by (i, k);");
-  util.execute("insert into t5 values(1, 'x', 'ab'), (2, 'y', 'cd'), (3, 'z', 
'ef');");
-  EXPECT_EQ(0, Command::getCommandStatus("hawq extract -d " + (string) HAWQ_DB 
+ " -o t5.yml testhawqregister_testusage2parquethash2.t5"));
-  EXPECT_EQ(0, Command::getCommandStatus("hawq regis

incubator-hawq git commit: HAWQ-1061. Add check_sizes_valid for all mode of hawq register.

2016-09-20 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master eb10af42d -> 08ed4bc39


HAWQ-1061. Add check_sizes_valid for all mode of hawq register.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/08ed4bc3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/08ed4bc3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/08ed4bc3

Branch: refs/heads/master
Commit: 08ed4bc39f42ecf546e8e02bf54b28397154add8
Parents: eb10af4
Author: xunzhang 
Authored: Tue Sep 20 17:25:22 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Sep 20 18:11:09 2016 +0800

--
 tools/bin/hawqregister | 27 ++-
 1 file changed, 18 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/08ed4bc3/tools/bin/hawqregister
--
diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister
index 89e9f4b..d030854 100755
--- a/tools/bin/hawqregister
+++ b/tools/bin/hawqregister
@@ -66,8 +66,8 @@ def register_yaml_dict_check(D):
 logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % attr)
 sys.exit(1)
 if D['Bucketnum'] <= 0:
-logger.error('Bucketnum should not be zero, please check your yaml 
configuration file.')
-sys.exit(1)
+logger.error('Bucketnum should not be zero, please check your yaml 
configuration file.')
+sys.exit(1)
 if D['FileFormat'] in ['Parquet', 'AO']:
 prefix = D['FileFormat']
 local_check_list = ['%s_FileLocations' % prefix, '%s_Schema' % prefix]
@@ -139,9 +139,9 @@ class GpRegisterAccessor(object):
 schema = ','.join([k['name'] + ' ' + k['type'] for k in schema_info])
 partlist = ""
 for index in range(len(partitions_constraint)):
-  if index > 0:
-  partlist += ", "
-  partlist = partlist + "partition " + partitions_name[index] + " " + 
partitions_constraint[index]
+if index > 0:
+partlist += ", "
+partlist = partlist + "partition " + partitions_name[index] + " " 
+ partitions_constraint[index]
   
 fmt = 'ROW' if fmt == 'AO' else fmt
 if fmt == 'ROW':
@@ -150,7 +150,7 @@ class GpRegisterAccessor(object):
  % (tablename, schema, fmt, 
file_locations['CompressionType'], file_locations['CompressionLevel'], 
file_locations['Checksum'], bucket_number, distrbution_policy))
 else:
 query = ('create table %s(%s) with (appendonly=true, 
orientation=%s, compresstype=%s, compresslevel=%s, checksum=%s, bucketnum=%s) 
%s %s (%s);'
- % (tablename, schema,fmt, 
file_locations['CompressionType'], file_locations['CompressionLevel'], 
file_locations['Checksum'], bucket_number, distrbution_policy, partitionby, 
partlist))
+ % (tablename, schema, fmt, 
file_locations['CompressionType'], file_locations['CompressionLevel'], 
file_locations['Checksum'], bucket_number, distrbution_policy, partitionby, 
partlist))
 else: # Parquet
 if partitionby is None:
 query = ('create table %s(%s) with (appendonly=true, 
orientation=%s, compresstype=%s, compresslevel=%s, pagesize=%s, 
rowgroupsize=%s, bucketnum=%s) %s;'
@@ -168,7 +168,7 @@ class GpRegisterAccessor(object):
 logger.error('Table %s is not an append-only table. There is no 
record in gp_distribution_policy table.' % tablename)
 sys.exit(1)
 if rows[0]['attrnums']:
-logger.error('Cannot register file(s) to a table which is hash 
distribuetd.')
+logger.error('Cannot register file(s) to a table which is hash 
distributed.')
 sys.exit(1)
 
 # pg_paqseg_#
@@ -397,6 +397,14 @@ class HawqRegister(object):
 logger.info('%s is not a file in hdfs, please check the 
yaml configuration file.' % fn)
 sys.exit(1)
 
+def check_sizes_valid():
+for k, fn in enumerate(self.files):
+hdfscmd = 'hdfs dfs -du %s' % fn
+_, out, _ = local_ssh_output(hdfscmd)
+if self.sizes[k] > int(out.strip().split()[0]):
+logger.error('File size(%s) in yaml configuration file 
should not exceed actual length(%s) of file %s.' % (self.sizes[k], 
out.strip().split()[0], fn))
+sys.exit(1)
+
 if self.yml:
 option_parser_yml(options.yml_config)
 self.filepath = self.files[0][:self.files[0].rfind('/')] if 
self.files else ''
@@ -413,7 +421,7 @@ class HawqRegister(object):
 check_hash_type() # Usage1 onl

incubator-hawq git commit: HAWQ-1022. Bugfix for getCommandOutput: using reference to temporary variable in stack address.

2016-08-25 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 48c34ca64 -> b13ca83bf


HAWQ-1022. Bugfix for getCommandOutput: using reference to temporary variable 
in stack address.
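
The problem and the fix can be illustrated with a minimal standalone sketch (this is
not the HAWQ Command class itself; the class and member names below are made up, and
only the chained-call shape mirrors Command::getCommandOutput). Returning a const
reference to data owned by a temporary object leaves the caller with a dangling
reference once the temporary is destroyed at the end of the full expression, while
returning by value copies the string out before that happens.

#include <iostream>
#include <string>

class Builder {
 public:
  Builder& set(const std::string& s) { value_ = s; return *this; }
  const std::string& result() const { return value_; }

  // Buggy shape of the old code: the Builder() temporary is destroyed when the
  // return expression finishes, so the returned reference points at dead storage.
  static const std::string& byReference(const std::string& s) {
    return Builder().set(s).result();
  }

  // Fixed shape: returning by value copies the string while the temporary is alive.
  static std::string byValue(const std::string& s) {
    return Builder().set(s).result();
  }

 private:
  std::string value_;
};

int main() {
  std::cout << Builder::byValue("safe") << std::endl;        // well-defined
  // std::cout << Builder::byReference("boom") << std::endl; // dangling reference if used
  return 0;
}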


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/b13ca83b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/b13ca83b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/b13ca83b

Branch: refs/heads/master
Commit: b13ca83bf3d065265f3129edd318a226cd9f9687
Parents: 48c34ca
Author: xunzhang 
Authored: Thu Aug 25 18:24:52 2016 +0800
Committer: xunzhang 
Committed: Thu Aug 25 19:04:04 2016 +0800

--
 src/test/feature/lib/command.cpp | 4 ++--
 src/test/feature/lib/command.h   | 8 
 2 files changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b13ca83b/src/test/feature/lib/command.cpp
--
diff --git a/src/test/feature/lib/command.cpp b/src/test/feature/lib/command.cpp
index d46aaf0..7f5f34a 100644
--- a/src/test/feature/lib/command.cpp
+++ b/src/test/feature/lib/command.cpp
@@ -73,14 +73,14 @@ void Command::_saveToFile() {
   out.close(); 
 }
 
-const string& Command::getCommandOutput(const string& cmd) {
+const string Command::getCommandOutput(const string& cmd) {
   return Command()
   .setCommand(cmd)
   .run()
   .getResultOutput();
 }
 
-const string& Command::getCommandOutput(const string& cmd,
+const string Command::getCommandOutput(const string& cmd,
 const string& out) {
   return Command()
   .setCommand(cmd)

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b13ca83b/src/test/feature/lib/command.h
--
diff --git a/src/test/feature/lib/command.h b/src/test/feature/lib/command.h
index 931b3bc..e0ae97f 100644
--- a/src/test/feature/lib/command.h
+++ b/src/test/feature/lib/command.h
@@ -18,12 +18,12 @@ class Command {
   Command& run();
   Command& setCommand(const std::string&);
   Command& setOutputFile(const std::string&);
-  const std::string& getCommand() const; 
-  const std::string& getResultOutput() const; 
+  const std::string& getCommand() const;
+  const std::string& getResultOutput() const;
   int getResultStatus() const;
 
-  static const std::string& getCommandOutput(const std::string& cmd);
-  static const std::string& getCommandOutput(const std::string& cmd, const 
std::string& out);
+  static const std::string getCommandOutput(const std::string& cmd);
+  static const std::string getCommandOutput(const std::string& cmd, const 
std::string& out);
   static int getCommandStatus(const std::string& cmd);
   static int getCommandStatus(const std::string& cmd, const std::string& out);
 



incubator-hawq git commit: HAWQ-1020. Fix bugs to let feature tests TestCommonLib.TestHdfsConfig and TestCommonLib.TestYarnConfig run in Concourse

2016-08-24 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master b29dc418a -> 48c34ca64


HAWQ-1020. Fix bugs to let feature tests TestCommonLib.TestHdfsConfig and 
TestCommonLib.TestYarnConfig run in Concourse
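
The core of the fix is how commands are wrapped before being run as the hdfs/yarn
user: a plain `sudo -u user cmd` drops the caller's PATH and environment under the
Concourse CI sandbox, so the patch switches to `/usr/bin/sudo -Eu user env
"PATH=$PATH" cmd`. A minimal sketch of that wrapping follows; the helper name is
hypothetical, while the real logic lives in HdfsConfig::runCommand and
YarnConfig::runCommand.

#include <iostream>
#include <string>

// Hypothetical helper mirroring the string assembly in runCommand(): execute
// `command` as `user` while preserving the caller's environment (-E) and PATH.
std::string wrapAsUser(const std::string& command, const std::string& user) {
  std::string cmd = "/usr/bin/sudo -Eu ";
  cmd.append(user);
  cmd.append(" env \"PATH=$PATH\" ");
  cmd.append(command);
  return cmd;
}

int main() {
  std::cout << wrapAsUser("hdfs dfsadmin -report", "hdfs") << std::endl;
  // prints: /usr/bin/sudo -Eu hdfs env "PATH=$PATH" hdfs dfsadmin -report
  return 0;
}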


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/48c34ca6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/48c34ca6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/48c34ca6

Branch: refs/heads/master
Commit: 48c34ca64f1b07e70e4068cb555d5124e6ebcd37
Parents: b29dc41
Author: Chunling Wang 
Authored: Thu Aug 25 14:11:16 2016 +0800
Committer: Chunling Wang 
Committed: Thu Aug 25 14:31:00 2016 +0800

--
 src/test/feature/lib/hdfs_config.cpp  |  8 
 src/test/feature/lib/yarn_config.cpp  |  6 +++---
 src/test/feature/testlib/test_lib.cpp | 22 --
 3 files changed, 27 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48c34ca6/src/test/feature/lib/hdfs_config.cpp
--
diff --git a/src/test/feature/lib/hdfs_config.cpp 
b/src/test/feature/lib/hdfs_config.cpp
index ee72a17..cded21e 100644
--- a/src/test/feature/lib/hdfs_config.cpp
+++ b/src/test/feature/lib/hdfs_config.cpp
@@ -19,9 +19,9 @@ void HdfsConfig::runCommand(const string &command,
 string &result) {
   string cmd = "";
   if (ishdfsuser) {
-cmd = "sudo -u ";
+cmd = "/usr/bin/sudo -Eu ";
 cmd.append(getHdfsUser());
-cmd.append(" ");
+cmd.append(" env \"PATH=$PATH\" ");
 cmd.append(command);
   } else {
 cmd = command;
@@ -154,7 +154,7 @@ int HdfsConfig::isTruncate() {
 
 string HdfsConfig::getHadoopHome() {
   string result = "";
-  runCommand("ps -ef|grep hadoop", true, result);
+  runCommand("ps -ef|grep hadoop", false, result);
   string hadoopHome = "";
   auto lines = hawq::test::split(result, '\n');
   for (size_t i=0; ihttp://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48c34ca6/src/test/feature/lib/yarn_config.cpp
--
diff --git a/src/test/feature/lib/yarn_config.cpp 
b/src/test/feature/lib/yarn_config.cpp
index 0587362..ea4d3f0 100644
--- a/src/test/feature/lib/yarn_config.cpp
+++ b/src/test/feature/lib/yarn_config.cpp
@@ -19,9 +19,9 @@ void YarnConfig::runCommand(const string &command,
 string &result) {
   string cmd = "";
   if (isyarnuser) {
-cmd = "sudo -u ";
+cmd = "/usr/bin/sudo -Eu ";
 cmd.append(getYarnUser());
-cmd.append(" ");
+cmd.append(" env \"PATH=$PATH\" ");
 cmd.append(command);
   } else {
 cmd = command;
@@ -184,7 +184,7 @@ bool YarnConfig::getStandbyRM(string &standbyRM,
 bool YarnConfig::getHARM(const string &RMtype,
string &RM,
int &port) {
-  if (!isHA()) {
+  if (isHA() <= 0) {
 return false;
   }
   string RMService = "";

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48c34ca6/src/test/feature/testlib/test_lib.cpp
--
diff --git a/src/test/feature/testlib/test_lib.cpp 
b/src/test/feature/testlib/test_lib.cpp
index 03a254f..6c828f6 100644
--- a/src/test/feature/testlib/test_lib.cpp
+++ b/src/test/feature/testlib/test_lib.cpp
@@ -64,6 +64,13 @@ TEST_F(TestCommonLib, TestHdfsConfig) {
   hc.isConfigKerberos();
   hc.isTruncate();
   std::string hadoopHome = hc.getHadoopHome();
+  /* grant privilege for $HADOOP_HOME/etc/hadoop/hdfs-site.xml */
+  std::string confPath = hadoopHome;
+  confPath.append("/etc/hadoop/hdfs-site.xml");
+  std::string cmd = "/usr/bin/sudo -Eu root env \"PATH=$PATH\" chmod 777 ";
+  cmd.append(confPath);
+  hawq::test::Command c(cmd);
+  std::string result = c.run().getResultOutput();
 
   std::string hostname = "";
   int port = 0;
@@ -87,8 +94,10 @@ TEST_F(TestCommonLib, TestHdfsConfig) {
 
   hc.isSafemode();
 
-  hc.getParameterValue("dfs.replication");
+  std::string defaultValue = hc.getParameterValue("dfs.replication");
   hc.setParameterValue("dfs.replication", "1");
+  hc.getParameterValue("dfs.replication");
+  hc.setParameterValue("dfs.replication", defaultValue);
 }
 
 TEST_F(TestCommonLib, TestYarnConfig) {
@@ -99,6 +108,13 @@ TEST_F(TestCommonLib, TestYarnConfig) {
   hc.isHA();
   hc.isConfigKerberos();
   std::string hadoopHome = hc.getHadoopHome();
+  /* grant privilege for $HADOOP_HOME/etc/hadoop/yarn-site.xml */
+  std::string confPath = hadoopHome;
+  confPath.append("/etc/hadoop/yarn-site.xml");
+  std::string cmd = "/usr/bin/sudo -Eu root env \"PATH=$PATH\" chmod 777 ";
+  cmd.append(confPath);
+  hawq::test::Command c(cmd);
+  std::string result = c.run().getRe

incubator-hawq git commit: HAWQ-1012. Append test yaml files for hawq register feature tests.

2016-08-24 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 7e0c63adc -> b29dc418a


HAWQ-1012. Append test yaml files for hawq register feature tests.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/b29dc418
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/b29dc418
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/b29dc418

Branch: refs/heads/master
Commit: b29dc418a08f352618b732e6889a6ca4a1d6a627
Parents: 7e0c63a
Author: xunzhang 
Authored: Thu Aug 25 10:33:29 2016 +0800
Committer: xunzhang 
Committed: Thu Aug 25 10:34:26 2016 +0800

--
 src/test/feature/ManagementTool/incorrect1.yml | 21 
 src/test/feature/ManagementTool/incorrect2.yml | 21 
 src/test/feature/ManagementTool/incorrect3.yml | 21 
 src/test/feature/ManagementTool/incorrect4.yml | 21 
 src/test/feature/ManagementTool/incorrect5.yml | 21 
 src/test/feature/ManagementTool/incorrect6.yml | 22 +
 6 files changed, 127 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b29dc418/src/test/feature/ManagementTool/incorrect1.yml
--
diff --git a/src/test/feature/ManagementTool/incorrect1.yml 
b/src/test/feature/ManagementTool/incorrect1.yml
new file mode 100755
index 000..8258a90
--- /dev/null
+++ b/src/test/feature/ManagementTool/incorrect1.yml
@@ -0,0 +1,21 @@
+DBVersion: PostgreSQL 8.2.15 (Greenplum Database 4.2.0 build 1) (HAWQ 2.0.1.0 
build
+  dev) on x86_64-apple-darwin15.5.0, compiled by GCC Apple LLVM version 7.3.0 
(clang-703.0.31)
+  compiled on Jul 25 2016 13:00:28
+DFS_URL: hdfs://localhost:8020
+Distribution_Policy: DISTRIBUTED RANDOMLY
+Encoding: UTF8
+FileFormat: Parquet
+Parquet_FileLocations:
+  Checksum: false
+  CompressionLevel: 0
+  CompressionType: null
+  EnableDictionary: false
+  Files:
+  - path: /hawq_default/16385/16387/35983/1
+size: 945
+  RowGroupSize: 8388608
+Parquet_Schema:
+- name: i
+  type: int4
+TableName: public.hawqregister
+Version: 1.0.0

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b29dc418/src/test/feature/ManagementTool/incorrect2.yml
--
diff --git a/src/test/feature/ManagementTool/incorrect2.yml 
b/src/test/feature/ManagementTool/incorrect2.yml
new file mode 100755
index 000..8258a90
--- /dev/null
+++ b/src/test/feature/ManagementTool/incorrect2.yml
@@ -0,0 +1,21 @@
+DBVersion: PostgreSQL 8.2.15 (Greenplum Database 4.2.0 build 1) (HAWQ 2.0.1.0 
build
+  dev) on x86_64-apple-darwin15.5.0, compiled by GCC Apple LLVM version 7.3.0 
(clang-703.0.31)
+  compiled on Jul 25 2016 13:00:28
+DFS_URL: hdfs://localhost:8020
+Distribution_Policy: DISTRIBUTED RANDOMLY
+Encoding: UTF8
+FileFormat: Parquet
+Parquet_FileLocations:
+  Checksum: false
+  CompressionLevel: 0
+  CompressionType: null
+  EnableDictionary: false
+  Files:
+  - path: /hawq_default/16385/16387/35983/1
+size: 945
+  RowGroupSize: 8388608
+Parquet_Schema:
+- name: i
+  type: int4
+TableName: public.hawqregister
+Version: 1.0.0

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b29dc418/src/test/feature/ManagementTool/incorrect3.yml
--
diff --git a/src/test/feature/ManagementTool/incorrect3.yml 
b/src/test/feature/ManagementTool/incorrect3.yml
new file mode 100755
index 000..f45b5e5
--- /dev/null
+++ b/src/test/feature/ManagementTool/incorrect3.yml
@@ -0,0 +1,21 @@
+DBVersion: PostgreSQL 8.2.15 (Greenplum Database 4.2.0 build 1) (HAWQ 2.0.1.0 
build
+  dev) on x86_64-apple-darwin15.5.0, compiled by GCC Apple LLVM version 7.3.0 
(clang-703.0.31)
+  compiled on Jul 25 2016 13:00:28
+DFS_URL: hdfs://localhost:8020
+Distribution_Policy: DISTRIBUTED RANDOMLY
+Encoding: UTF8
+FileFormat: Parquet
+Parquet_FileLocations:
+  Checksum: false
+  CompressionLevel: 0
+  CompressionType: null
+  EnableDictionary: false
+  Files:
+  - path: /hawq_default/16385/16387/35983/1
+  PageSize: 1048576
+  RowGroupSize: 8388608
+Parquet_Schema:
+- name: i
+  type: int4
+TableName: public.hawqregister
+Version: 1.0.0

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b29dc418/src/test/feature/ManagementTool/incorrect4.yml
--
diff --git a/src/test/feature/ManagementTool/incorrect4.yml 
b/src/test/feature/ManagementTool/incorrect4.yml
new file mode 100755
index 000..3b8b921
--- /dev/null
+++ b/src/test/feature/ManagementTool/incorrect4.yml
@@ -0,0 +1,21 @@
+DBVersion: PostgreSQL 8.2.15 (Greenplum Database 4.2.0 build 1) (HAWQ 2.0.1.0 
build
+  dev) on x

[1/2] incubator-hawq git commit: HAWQ-1012. Check whether the input yaml file for hawq register is valid.

2016-08-24 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c2280debb -> 7e0c63adc


HAWQ-1012. Check whether the input yaml file for hawq register is valid.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/31c3cde5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/31c3cde5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/31c3cde5

Branch: refs/heads/master
Commit: 31c3cde5565a26023bd314c64cccfcd669032680
Parents: c2280de
Author: xunzhang 
Authored: Wed Aug 24 11:20:44 2016 +0800
Committer: Ruilong Huo 
Committed: Thu Aug 25 10:24:34 2016 +0800

--
 tools/bin/hawqregister | 110 ++--
 1 file changed, 75 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/31c3cde5/tools/bin/hawqregister
--
diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister
index 7a20906..c2692d8 100755
--- a/tools/bin/hawqregister
+++ b/tools/bin/hawqregister
@@ -54,59 +54,96 @@ def option_parser():
 return parser
 
 
+def register_yaml_dict_check(D):
+# check exists
+check_list = ['DFS_URL', 'Distribution_Policy', 'FileFormat', 'TableName']
+for attr in check_list:
+if D.get(attr) == None:
+logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % attr)
+sys.exit(1)
+if D['FileFormat'] in ['Parquet', 'AO']:
+prefix = D['FileFormat']
+local_check_list = ['%s_FileLocations' % prefix, '%s_Schema' % prefix]
+for attr in local_check_list:
+if D.get(attr) == None:
+logger.error('Wrong configuration yaml file format: "%s" 
attribute does not exist.\n See example in "hawq register --help".' % attr)
+sys.exit(1)
+if D['%s_FileLocations' % prefix].get('Files') == None:
+logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % 
'%s_FileLocations.Files' % prefix)
+sys.exit(1)
+for d in D['%s_FileLocations' % prefix]['Files']:
+if d.get('path') == None:
+logger.error('Wrong configuration yaml file format: "%s" 
attribute does not exist.\n See example in "hawq register --help".' % 
'%s_FileLocations.Files.path' % prefix)
+sys.exit(1)
+if d.get('size') == None:
+logger.error('Wrong configuration yaml file format: "%s" 
attribute does not exist.\n See example in "hawq register --help".' % 
'%s_FileLocations.Files.size' % prefix)
+sys.exit(1)
+else:
+logger.error('hawq register only support Parquet and AO formats. 
Format %s is not supported.' % D['FileFormat'])
+sys.exit(1)
+prefix = D['FileFormat']
+if D.get('%s_Schema' % prefix) == None:
+logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % '%s_Schema' % 
prefix)
+sys.exit(1)
+for d in D['%s_Schema' % prefix]:
+if d.get('name') == None:
+logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % '%s_Schema.name' % 
prefix)
+sys.exit(1)
+if d.get('type') == None:
+logger.error('Wrong configuration yaml file format: "%s" attribute 
does not exist.\n See example in "hawq register --help".' % '%s_Schema.type' % 
prefix)
+sys.exit(1)
+if D['FileFormat'] == 'Parquet':
+sub_check_list = ['CompressionLevel', 'CompressionType', 'PageSize', 
'RowGroupSize']
+for attr in sub_check_list:
+if not D['Parquet_FileLocations'].has_key(attr):
+logger.error('Wrong configuration yaml file format: "%s" 
attribute does not exist.\n See example in "hawq register --help".' % 
'Parquet_FileLocations.%s' % attr)
+sys.exit(1)
+else:
+sub_check_list = ['Checksum', 'CompressionLevel', 'CompressionType']
+for attr in sub_check_list:
+if not D['AO_FileLocations'].has_key(attr):
+logger.error('Wrong configuration yaml file format: "%s" 
attribute does not exist.\n See example in "hawq register --help".' % 
'AO_FileLocations.%s' % attr)
+sys.exit(1)
+
+
+
 def option_parser_yml(yml_file):
 import yaml
 with open(yml_file, 'r') as f:
 params = yaml.load(f)
-# check if valid configuration yaml file
-attrs = ['FileFormat', 'DFS_URL', 'Distribution_Policy']
-for attr in attrs:
-if attr not in params.keys():
-logger.error

[2/2] incubator-hawq git commit: HAWQ-1012. Update feature tests for hawq register.

2016-08-24 Thread huor
HAWQ-1012. Update feature tests for hawq register.
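
The new TestIncorrectYaml cases drive `hawq register` against deliberately broken
yaml files and assert on the tail of the tool's error output. Below is a
self-contained sketch of that kind of suffix check; hawq::test::endsWith comes from
lib/string_util.h, and the stand-alone version here is only an illustration whose
signature may differ from the real helper.

#include <cassert>
#include <string>

// Illustrative stand-in for a suffix check such as hawq::test::endsWith.
static bool endsWith(const std::string& text, const std::string& suffix) {
  return text.size() >= suffix.size() &&
         text.compare(text.size() - suffix.size(), suffix.size(), suffix) == 0;
}

int main() {
  const std::string output =
      "Wrong configuration yaml file format: \"size\" attribute does not exist.";
  assert(endsWith(output, "attribute does not exist."));
  return 0;
}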


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7e0c63ad
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7e0c63ad
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7e0c63ad

Branch: refs/heads/master
Commit: 7e0c63adc7b7d117517a64905e20bb23229fb368
Parents: 31c3cde
Author: xunzhang 
Authored: Wed Aug 24 17:14:37 2016 +0800
Committer: Ruilong Huo 
Committed: Thu Aug 25 10:25:09 2016 +0800

--
 .../ManagementTool/test_hawq_register.cpp   | 24 
 tools/bin/hawqregister  |  2 +-
 2 files changed, 25 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7e0c63ad/src/test/feature/ManagementTool/test_hawq_register.cpp
--
diff --git a/src/test/feature/ManagementTool/test_hawq_register.cpp 
b/src/test/feature/ManagementTool/test_hawq_register.cpp
index 00934a9..434f817 100644
--- a/src/test/feature/ManagementTool/test_hawq_register.cpp
+++ b/src/test/feature/ManagementTool/test_hawq_register.cpp
@@ -2,6 +2,7 @@
 
 #include "lib/command.h"
 #include "lib/sql_util.h"
+#include "lib/string_util.h"
 
 #include "gtest/gtest.h"
 
@@ -317,3 +318,26 @@ TEST_F(TestHawqRegister, TestUsage2AOHash2) {
   util.execute("drop table t8;");
   util.execute("drop table nt8;");
 }
+
+TEST_F(TestHawqRegister, TestEmptyTable) {
+  SQLUtility util;
+  util.execute("drop table if exists t9;");
+  util.execute("create table t9(i int) with (appendonly=true, orientation=row) 
distributed randomly;");
+  EXPECT_EQ(0, Command::getCommandStatus("hawq extract -d " + (string) HAWQ_DB 
+ " -o t9.yml testhawqregister_testemptytable.t9"));
+  EXPECT_EQ(0, Command::getCommandStatus("hawq register -d " + (string) 
HAWQ_DB + " -c t9.yml testhawqregister_testemptytable.nt9"));
+  util.query("select * from nt9;", 0);
+  EXPECT_EQ(0, Command::getCommandStatus("rm -rf t9.yml"));
+  util.execute("drop table t9;");
+  util.execute("drop table nt9;");
+}
+
+TEST_F(TestHawqRegister, TestIncorrectYaml) {
+  SQLUtility util;
+  string filePath = util.getTestRootPath() + "/ManagementTool/";
+  EXPECT_EQ(0, hawq::test::endsWith(Command::getCommandOutput("hawq register 
-d " + (string) HAWQ_DB + " -c " + filePath + "incorrect1.yml xx"), "attribute 
does not exist."));
+  EXPECT_EQ(0, hawq::test::endsWith(Command::getCommandOutput("hawq register 
-d " + (string) HAWQ_DB + " -c " + filePath + "incorrect2.yml xx"), "attribute 
does not exist."));
+  EXPECT_EQ(0, hawq::test::endsWith(Command::getCommandOutput("hawq register 
-d " + (string) HAWQ_DB + " -c " + filePath + "incorrect3.yml xx"), "attribute 
does not exist."));
+  EXPECT_EQ(0, hawq::test::endsWith(Command::getCommandOutput("hawq register 
-d " + (string) HAWQ_DB + " -c " + filePath + "incorrect4.yml xx"), "attribute 
does not exist."));
+  EXPECT_EQ(0, hawq::test::endsWith(Command::getCommandOutput("hawq register 
-d " + (string) HAWQ_DB + " -c " + filePath + "incorrect5.yml xx"), "attribute 
does not exist."));
+  EXPECT_EQ(1, Command::getCommandStatus("hawq register -d " + (string) 
HAWQ_DB + " -c " + filePath + "incorrect6.yml xx"));
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7e0c63ad/tools/bin/hawqregister
--
diff --git a/tools/bin/hawqregister b/tools/bin/hawqregister
index c2692d8..26284a8 100755
--- a/tools/bin/hawqregister
+++ b/tools/bin/hawqregister
@@ -359,7 +359,7 @@ if __name__ == '__main__':
 print 'File(s) to be registered:', files
 if fileformat == 'Parquet':
 check_parquet_format(files)
-print files
+print files
 move_files_in_hdfs(database, tablename, files, firstsegno, tabledir, True)
 insert_metadata_into_database(dburl, database, tablename, seg_name, 
firstsegno, tabledir, sizes)
 logger.info('Hawq Register Succeed.')



incubator-hawq git commit: HAWQ-939. Add coverity scan badge

2016-08-17 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master a1a2f2c5c -> 48bf8b618


HAWQ-939. Add coverity scan badge


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/48bf8b61
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/48bf8b61
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/48bf8b61

Branch: refs/heads/master
Commit: 48bf8b61880d905b1a1859cdfc27244b0735b9d6
Parents: a1a2f2c
Author: xunzhang 
Authored: Tue Aug 9 13:51:23 2016 +0800
Committer: Ruilong Huo 
Committed: Thu Aug 18 14:44:20 2016 +0800

--
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/48bf8b61/README.md
--
diff --git a/README.md b/README.md
index 8175a17..eafb027 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-![HAWQ](http://hawq.incubator.apache.org/images/logo-hawq.png) 
[![https://travis-ci.org/apache/incubator-hawq.png](https://travis-ci.org/apache/incubator-hawq.png)](https://travis-ci.org/apache/incubator-hawq)
+![HAWQ](http://hawq.incubator.apache.org/images/logo-hawq.png) 
[![https://travis-ci.org/apache/incubator-hawq.png](https://travis-ci.org/apache/incubator-hawq.png)](https://travis-ci.org/apache/incubator-hawq)
 [![Coverity Scan 
Build](https://scan.coverity.com/projects/apache-incubator-hawq/badge.svg)](https://scan.coverity.com/projects/apache-incubator-hawq)
 
 [Website](http://hawq.incubator.apache.org/) |
 [Wiki](https://cwiki.apache.org/confluence/display/HAWQ/Apache+HAWQ+Home) |



[1/2] incubator-hawq git commit: HAWQ-1005. Add schema info with Parquet format in hawqextract.

2016-08-15 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 189a2c3a0 -> a5a2e6d13


HAWQ-1005. Add schema info with Parquet format in hawqextract.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/ff1419c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/ff1419c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/ff1419c1

Branch: refs/heads/master
Commit: ff1419c192ffdf7105158262470c85af507f851b
Parents: 189a2c3
Author: xunzhang 
Authored: Mon Aug 15 17:09:39 2016 +0800
Committer: xunzhang 
Committed: Mon Aug 15 17:09:39 2016 +0800

--
 tools/bin/hawqextract | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/ff1419c1/tools/bin/hawqextract
--
diff --git a/tools/bin/hawqextract b/tools/bin/hawqextract
index 8dc471c..699c713 100755
--- a/tools/bin/hawqextract
+++ b/tools/bin/hawqextract
@@ -434,7 +434,8 @@ def extract_metadata(conn, tbname):
  
p_pgclass['relfilenode'])
 }
 file_locations['Partitions'].append(par_info)
-
+logger.info('-- extract Parquet_Schema')
+metadata['Parquet_Schema'] = accessor.get_schema(relid)
 metadata['Parquet_FileLocations'] = file_locations
 
 # extract AO/Parquet specific metadata



[2/2] incubator-hawq git commit: HAWQ-1005. Add distribution policy info with hawqextract.

2016-08-15 Thread huor
HAWQ-1005. Add distribution policy info with hawqextract.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/a5a2e6d1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/a5a2e6d1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/a5a2e6d1

Branch: refs/heads/master
Commit: a5a2e6d13805915eb78f700f090859efd1b1b5b0
Parents: ff1419c
Author: xunzhang 
Authored: Mon Aug 15 17:59:34 2016 +0800
Committer: xunzhang 
Committed: Mon Aug 15 17:59:34 2016 +0800

--
 tools/bin/hawqextract | 58 --
 1 file changed, 41 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a5a2e6d1/tools/bin/hawqextract
--
diff --git a/tools/bin/hawqextract b/tools/bin/hawqextract
index 699c713..f3ffe5b 100755
--- a/tools/bin/hawqextract
+++ b/tools/bin/hawqextract
@@ -7,9 +7,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -28,7 +28,7 @@ Options:
 -o output file: the output metadata file, if not set, will output to 
terminal.
 -W: force password authentication
 -v: verbose
--?: help 
+-?: help
 
 hawq extract output YAML file format:
 
@@ -38,7 +38,7 @@ FileFormat: string (AO/Parquet)
 TableName: string (schemaname.tablename)
 DFS_URL: string (hdfs://127.0.0.1:9000)
 Encoding: UTF8
-AO_Schema: 
+AO_Schema:
 - name: string
   type: string
 
@@ -51,7 +51,7 @@ AO_FileLocations:
   Files:
   - path: string (/gpseg0/16385/35469/35470.1)
 size: long
-  
+
   Partitions:
   - Blocksize: int
 Checksum: boolean
@@ -113,7 +113,7 @@ class GpExtractError(Exception): pass
 class GpMetadataAccessor:
 def __init__(self, conn):
 self.conn = conn
-
+
 rows = self.exec_query("""
 SELECT oid, datname, dat2tablespace,
pg_encoding_to_char(encoding) encoding
@@ -157,7 +157,7 @@ class GpMetadataAccessor:
 ...  {'fileno':'3', 'filesize':'160'}]
 '''
 qry = """
-SELECT segno as fileno, eof as filesize 
+SELECT segno as fileno, eof as filesize
 FROM pg_aoseg.pg_aoseg_%d
 ORDER by fileno;
 """ % oid
@@ -175,7 +175,7 @@ class GpMetadataAccessor:
 ...  {'fileno':'3', 'filesize':'160'}]
 '''
 qry = """
-SELECT segno as fileno, eof as filesize 
+SELECT segno as fileno, eof as filesize
 FROM pg_aoseg.pg_paqseg_%d
 ORDER by fileno;
 """ % oid
@@ -194,7 +194,7 @@ class GpMetadataAccessor:
 if not rows:
 raise GpExtractError('Table %s.%s not exists!' % (nspname, 
relname))
 return rows[0]
-
+
 def get_schema(self, relid):
 '''
 Fetch schema of the table specified by oid `relid`.
@@ -242,6 +242,23 @@ class GpMetadataAccessor:
 """ % (nspname, relname)
 return self.exec_query(qry)
 
+def get_distribution_policy_info(self, oid, relid):
+'''
+Get table's distribution policy from gp_distribution_policy view.
+'''
+qry = """
+SELECT *
+FROM gp_distribution_policy
+WHERE localoid = '%s'
+""" % oid
+policy = self.exec_query(qry)[0]['attrnums']
+if not policy:
+return 'DISTRIBUTED RANDOMLY'
+else:
+cols = [d['name'] for d in self.get_schema(relid)]
+cols_list = [cols[int(k)-1] for k in policy.strip('{}').split(',')]
+return 'DISTRIBUTED BY (' + ','.join(cols_list) + ')'
+
 
 def connectdb(options):
 '''
@@ -301,7 +318,7 @@ def extract_metadata(conn, tbname):
 '''
 Given AO table's oid and relfilenode, return path and size of all its
 data files on HDFS as [{'path': path1, 'size': size1}, {...}].
-
+
 Path doesn't include DFS URL.
 
 Example:
@@ -331,7 +348,7 @@ def extract_metadata(conn, tbname):
 )
 files.append({'path': path, 'size': int(f['filesize'])})
 return files
-
+
 def get_parquet_table_files(oid, relfilenode):
 '''
 The same with get_ao_table_files, except that it's for Parquet table.
@@ -349,7 +366,7 @@ def extract_metadata(conn, tbname):
 )
 files.append({'path': path, 'size': int(f['filesize'])})
 return files
-
+
 def extract_AO_metadata():
   

incubator-hawq git commit: HAWQ-914. Improve user experience of HAWQ's build infrastructure

2016-08-12 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 217c17c9c -> f149c0842


HAWQ-914. Improve user experience of HAWQ's build infrastructure

Improving the user experience is an endless task. This patch adds more output info
for various header/library check failures. It also makes some output info
more platform/release neutral.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/f149c084
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/f149c084
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/f149c084

Branch: refs/heads/master
Commit: f149c0842713e54ae6500d9ba773378c56e61efc
Parents: 217c17c
Author: Paul Guo 
Authored: Wed Aug 10 14:48:29 2016 +0800
Committer: Ruilong Huo 
Committed: Fri Aug 12 09:56:39 2016 +0800

--
 configure| 146 ++
 configure.in | 142 +---
 2 files changed, 205 insertions(+), 83 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/f149c084/configure
--
diff --git a/configure b/configure
index abb93e7..22401df 100755
--- a/configure
+++ b/configure
@@ -9026,8 +9026,9 @@ $as_echo_n "checking R_HOME... " >&6; }
if test "x${r_home_guess}" != "x"; then
   R_HOME=$r_home_guess
else
-  as_fn_error $? "Failed to Set R_HOME. R is not installed? R_HOME is not 
set
-or pkg-config is not installed to find R_HOME automatically." "$LINENO" 5
+  as_fn_error $? "Failed to set R_HOME. R is not installed? Or R_HOME is 
not set
+or pkg-config is not installed to find R_HOME automatically.
+Check config.log for details or not use --with-r if you do not want the PL/R 
support." "$LINENO" 5
 fi
   fi
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${R_HOME}" >&5
@@ -9103,7 +9104,9 @@ if test "$ac_res" != no; then :
   test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
 
 else
-  as_fn_error $? "could not find function 'gss_init_sec_context' required for 
GSSAPI" "$LINENO" 5
+  as_fn_error $? "could not find function 'gss_init_sec_context' required for 
GSSAPI.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-gssapi if you don't want the GSSAPI support." "$LINENO" 5
 fi
 
   else
@@ -9168,7 +9171,9 @@ if test "$ac_res" != no; then :
   test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
 
 else
-  as_fn_error $? "could not find function 'com_err' required for Kerberos 5" 
"$LINENO" 5
+  as_fn_error $? "could not find function 'com_err' required for Kerberos 5.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-krb5 if you don't want the Kerberos 5 support." "$LINENO" 5
 fi
 
  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing 
krb5_sendauth" >&5
@@ -9226,7 +9231,9 @@ if test "$ac_res" != no; then :
   test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
 
 else
-  as_fn_error $? "could not find function 'krb5_sendauth' required for 
Kerberos 5" "$LINENO" 5
+  as_fn_error $? "could not find function 'krb5_sendauth' required for 
Kerberos 5.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-krb5 if you don't want the Kerberos 5 support." "$LINENO" 5
 fi
 
   else
@@ -9285,7 +9292,9 @@ if test "$ac_res" != no; then :
   test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
 
 else
-  as_fn_error $? "could not find function 'com_err' required for Kerberos 5" 
"$LINENO" 5
+  as_fn_error $? "could not find function 'com_err' required for Kerberos 5.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-krb5 if you don't want the Kerberos 5 support." "$LINENO" 5
 fi
 
   fi
@@ -9337,7 +9346,9 @@ _ACEOF
   LIBS="-lcrypto $LIBS"
 
 else
-  as_fn_error $? "library 'crypto' is required for OpenSSL" "$LINENO" 5
+  as_fn_error $? "library 'crypto' is required for OpenSSL.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-openssl if you don't want the OpenSSL support." "$LINENO" 5
 fi
 
  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for SSL_library_init in 
-lssl" >&5
@@ -9384,7 +9395,9 @@ _ACEOF
   LIBS="-lssl $LIBS"
 
 else
-  as_fn_error $? "library 'ssl' is required for OpenSSL" "$LINENO" 5
+  as_fn_error $? "library 'ssl' is required for OpenSSL.
+Check config.log for details. It is possible the compiler isn't looking in the 
proper directory.
+Don't use --with-openssl if you don't want the OpenSSL support." "$LINENO" 5
 fi
 
   else
@@ -9432,7 +9445,9 @@ _A

incubator-hawq git commit: HAWQ-998. Fix test for aggregate-with-null.

2016-08-10 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c742cd716 -> 217c17c9c


HAWQ-998. Fix test for aggregate-with-null.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/217c17c9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/217c17c9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/217c17c9

Branch: refs/heads/master
Commit: 217c17c9c6291bc5b9c746bb528cedf9dc43220d
Parents: c742cd7
Author: stanlyxiang 
Authored: Thu Aug 11 14:39:21 2016 +0800
Committer: stanlyxiang 
Committed: Thu Aug 11 14:39:21 2016 +0800

--
 src/test/feature/query/test_aggregate.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/217c17c9/src/test/feature/query/test_aggregate.cpp
--
diff --git a/src/test/feature/query/test_aggregate.cpp 
b/src/test/feature/query/test_aggregate.cpp
index 2cb24bf..bc2828b 100644
--- a/src/test/feature/query/test_aggregate.cpp
+++ b/src/test/feature/query/test_aggregate.cpp
@@ -67,9 +67,9 @@ TEST_F(TestAggregate, TestAggregateWithNull) {
   dGen.genTableWithNull("t");
 
   util.query(
-  "select SUM(CASE WHEN a = 15 THEN 1 ELSE 0 END), b ,c from t group by "
-  "b,c",
-  "1||aa|\n0||WET|\n0|51||\n");
+  "select SUM(CASE WHEN a = 15 THEN 1 ELSE 0 END) as aa, b ,c from t group 
by b, c "
+  "order by aa, b, c",
+  "0|51||\n0||WET|\n1||aa|\n");
 }
 
 TEST_F(TestAggregate, TestAggregateDerivedWin) {



incubator-hawq git commit: Revert "HAWQ-900. Add dependency in PL/R rpm build spec file plr.spec"

2016-07-28 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master fb7f806fe -> dada9ba99


Revert "HAWQ-900. Add dependency in PL/R rpm build spec file plr.spec"

This reverts commit fb7f806feba74a48e474fead5b84fc0ee72f07f5.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/dada9ba9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/dada9ba9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/dada9ba9

Branch: refs/heads/master
Commit: dada9ba9919bc6e402f40163cb15e854ad6e0340
Parents: fb7f806
Author: Ruilong Huo 
Authored: Fri Jul 29 12:58:14 2016 +0800
Committer: Ruilong Huo 
Committed: Fri Jul 29 12:58:14 2016 +0800

--
 src/pl/plr.spec | 4 
 1 file changed, 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/dada9ba9/src/pl/plr.spec
--
diff --git a/src/pl/plr.spec b/src/pl/plr.spec
index a6621d9..e34e8b8 100644
--- a/src/pl/plr.spec
+++ b/src/pl/plr.spec
@@ -4,13 +4,9 @@ Version:08.03.00.14
 Release:   0
 Prefix: /usr/local
 License:   GPL
-BuildRequires:  R-devel
-Requires:   R
 
 %define _unpackaged_files_terminate_build 0
 
-AutoReqProv:no
-
 %description
 The PL/R modules provides Procedural language implementation of R for HAWQ.
 



incubator-hawq git commit: HAWQ-922. Fix sql utility issue which incurs failure of basic verification for various pl and udf in HAWQ

2016-07-27 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 7ecf59aa9 -> 9fac5349b


HAWQ-922. Fix sql utility issue which incurs failure of basic verification for 
various pl and udf in HAWQ


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/9fac5349
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/9fac5349
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/9fac5349

Branch: refs/heads/master
Commit: 9fac5349b6104332717b01661767497a9c8cd422
Parents: 7ecf59a
Author: Ruilong Huo 
Authored: Thu Jul 28 13:38:44 2016 +0800
Committer: Ruilong Huo 
Committed: Thu Jul 28 13:38:44 2016 +0800

--
 src/test/feature/lib/sql_util.cpp | 1 -
 1 file changed, 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/9fac5349/src/test/feature/lib/sql_util.cpp
--
diff --git a/src/test/feature/lib/sql_util.cpp 
b/src/test/feature/lib/sql_util.cpp
index 36f33a7..37d8d49 100644
--- a/src/test/feature/lib/sql_util.cpp
+++ b/src/test/feature/lib/sql_util.cpp
@@ -73,7 +73,6 @@ string SQLUtility::execute(const string &sql, bool check) {
 EXPECT_EQ(0, conn->getLastStatus()) << conn->getLastResult();
 return "";
   }
-  EXPECT_NE(0,  conn->getLastStatus());
   return conn.get()->getLastResult();
 }
 



incubator-hawq git commit: HAWQ-922. Add basic verification for various pl and udf in HAWQ

2016-07-27 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 4de3d107c -> 7ecf59aa9


HAWQ-922. Add basic verification for various pl and udf in HAWQ


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7ecf59aa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7ecf59aa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7ecf59aa

Branch: refs/heads/master
Commit: 7ecf59aa95b287949c9ae4a97d81eca55ecde21f
Parents: 4de3d10
Author: Ruilong Huo 
Authored: Mon Jul 25 15:20:00 2016 +0800
Committer: Ruilong Huo 
Committed: Thu Jul 28 11:18:12 2016 +0800

--
 src/test/feature/UDF/TestUDF.cpp| 218 ++-
 src/test/feature/UDF/ans/function_c.ans.source  |  15 ++
 src/test/feature/UDF/ans/function_internal.ans  |  16 ++
 src/test/feature/UDF/ans/function_pgcrypto.ans  |  12 +
 src/test/feature/UDF/ans/function_pljava.ans|  21 ++
 src/test/feature/UDF/ans/function_pljavau.ans   |  21 ++
 src/test/feature/UDF/ans/function_plperl.ans|  35 +++
 src/test/feature/UDF/ans/function_plperlu.ans   |  35 +++
 src/test/feature/UDF/ans/function_plpgsql.ans   |  18 ++
 src/test/feature/UDF/ans/function_plpythonu.ans |  18 ++
 src/test/feature/UDF/ans/function_plr.ans   |  20 ++
 src/test/feature/UDF/ans/function_sql.ans   |  24 ++
 src/test/feature/UDF/lib/function.c |  12 +-
 src/test/feature/UDF/sql/PLJavaAdd.jar  | Bin 0 -> 654 bytes
 src/test/feature/UDF/sql/PLJavaAdd.java |   7 +
 src/test/feature/UDF/sql/PLJavauAdd.jar | Bin 0 -> 657 bytes
 src/test/feature/UDF/sql/PLJavauAdd.java|   7 +
 src/test/feature/UDF/sql/function_c.sql.source  |   6 +
 src/test/feature/UDF/sql/function_internal.sql  |   7 +
 src/test/feature/UDF/sql/function_pgcrypto.sql  |   2 +
 src/test/feature/UDF/sql/function_pljava.sql|   8 +
 src/test/feature/UDF/sql/function_pljavau.sql   |   8 +
 src/test/feature/UDF/sql/function_plperl.sql|  22 ++
 src/test/feature/UDF/sql/function_plperlu.sql   |  22 ++
 src/test/feature/UDF/sql/function_plpgsql.sql   |   9 +
 src/test/feature/UDF/sql/function_plpythonu.sql |   9 +
 src/test/feature/UDF/sql/function_plr.sql   |  11 +
 src/test/feature/UDF/sql/function_sql.sql   |  13 ++
 src/test/feature/lib/hawq_scp.cpp   |  24 ++
 src/test/feature/lib/hawq_scp.h |  23 ++
 src/test/feature/lib/sql_util.cpp   |  33 +++
 src/test/feature/lib/sql_util.h |  10 +
 32 files changed, 683 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7ecf59aa/src/test/feature/UDF/TestUDF.cpp
--
diff --git a/src/test/feature/UDF/TestUDF.cpp b/src/test/feature/UDF/TestUDF.cpp
index ce7f158..7107b63 100755
--- a/src/test/feature/UDF/TestUDF.cpp
+++ b/src/test/feature/UDF/TestUDF.cpp
@@ -1,7 +1,9 @@
 #include "gtest/gtest.h"
 
+#include "lib/command.h"
 #include "lib/sql_util.h"
 #include "lib/file_replace.h"
+#include "lib/hawq_scp.h"
 
 
 class TestUDF: public ::testing::Test
@@ -51,8 +53,18 @@ TEST_F(TestUDF, TestUDFCreation)
 TEST_F(TestUDF, TestUDFSetReturning)
 {
hawq::test::SQLUtility util;
-   util.execSQLFile("UDF/sql/function_set_returning.sql",
-"UDF/ans/function_set_returning.ans");
+   // enable plpythonu language if it is absent
+   if (util.getQueryResult("SELECT lanname FROM pg_language WHERE lanname 
= 'plpythonu'") != "plpythonu")
+   {
+   util.execute("CREATE LANGUAGE plpythonu", false);
+   }
+
+   // run test if plpythonu language is enabled
+   if (util.getQueryResult("SELECT lanname FROM pg_language WHERE lanname 
= 'plpythonu'") == "plpythonu")
+   {
+   util.execSQLFile("UDF/sql/function_set_returning.sql",
+"UDF/ans/function_set_returning.ans");
+   }
 }
 
 TEST_F(TestUDF, TestUDFExtension)
@@ -61,3 +73,205 @@ TEST_F(TestUDF, TestUDFExtension)
util.execSQLFile("UDF/sql/function_extension.sql",
 "UDF/ans/function_extension.ans");
 }
+
+TEST_F(TestUDF, TestUDFInternal)
+{
+   hawq::test::SQLUtility util;
+   util.execSQLFile("UDF/sql/function_internal.sql",
+"UDF/ans/function_internal.ans");
+}
+
+TEST_F(TestUDF, TestUDFC)
+{
+   // preprocess source files to get sql/ans files
+   hawq::test::SQLUtility util;
+   std::string d_feature_test_root(util.getTestRootPath());
+   std::string f_sql_tpl(d_feature_test_root + 
"/UDF/sql/function_c.sql.source");
+   std::string f_ans_tpl(d_feature_test_root + 
"/UDF/ans/function_c.ans.source");
+   std::string f_sql(d_feature_test_root + "/UDF/sql/function_

incubator-hawq git commit: HAWQ-933. Warning fix for gpnetbench.

2016-07-18 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master f6223ad74 -> 69f3c9026


HAWQ-933. Warning fix for gpnetbench.
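
One of the warnings fixed here is more than cosmetic: send(2) and write(2) return
ssize_t, so storing the result in a size_t (as the old send_buffer did) turns a -1
error return into a huge unsigned value and defeats any `retval < 0` check. A hedged
sketch of the failure mode and the corrected loop follows; it is not the exact
gpnetbench code.

#include <cstdio>
#include <sys/socket.h>
#include <sys/types.h>

// With `size_t retval` the -1 from send() wraps to a huge unsigned value, so the
// error branch below can never fire; ssize_t (as in the patch) keeps it meaningful.
static void send_all(int fd, const char* buffer, size_t bytes) {
  while (bytes > 0) {
    ssize_t retval = send(fd, buffer, bytes, 0);
    if (retval < 0) {
      perror("send");
      return;
    }
    buffer += retval;
    bytes -= (size_t)retval;
  }
}

int main() {
  const char msg[] = "hello";
  send_all(-1, msg, sizeof msg - 1);  // invalid fd: exercises the error path
  return 0;
}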


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/69f3c902
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/69f3c902
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/69f3c902

Branch: refs/heads/master
Commit: 69f3c9026892bc9c6937ed132fa45716d7912a8f
Parents: f6223ad
Author: xunzhang 
Authored: Tue Jul 19 09:49:38 2016 +0800
Committer: xunzhang 
Committed: Tue Jul 19 09:49:38 2016 +0800

--
 tools/gpnetbench/gpnetbenchClient.c | 36 +---
 tools/gpnetbench/gpnetbenchServer.c | 10 +
 2 files changed, 25 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/69f3c902/tools/gpnetbench/gpnetbenchClient.c
--
diff --git a/tools/gpnetbench/gpnetbenchClient.c 
b/tools/gpnetbench/gpnetbenchClient.c
index d95f8a9..4669d16 100644
--- a/tools/gpnetbench/gpnetbenchClient.c
+++ b/tools/gpnetbench/gpnetbenchClient.c
@@ -29,25 +29,11 @@
 #include 
 
 #define INIT_RETRIES 5
+void usage(void);
 void send_buffer(int fd, char* buffer, int bytes);
-void print_headers();
+void print_headers(void);
 double subtractTimeOfDay(struct timeval* begin, struct timeval* end);
 
-void usage()
-{
-   fprintf(stdout, "usage: gpnetbench -p PORT -H HOST [-l SECONDS] [-t 
EXPERIMENT] [-f UNITS] [-P HEADERS] [-b KB] [-h]\n");
-   fprintf(stdout, "where\n");
-   fprintf(stdout, "   PORT is the port to connect to for the 
server\n");
-   fprintf(stdout, "   HOST is the hostname to connect to for the 
server\n");
-   fprintf(stdout, "   SECONDS is the number of seconds to sample the 
network, where the default is 60\n");
-   fprintf(stdout, "   EXPERIMENT is the experiment name to run, where 
the default is TCP_STREAM\n");
-   fprintf(stdout, "   UNITS is the output units, where the default is 
M megabytes\n");
-   fprintf(stdout, "   HEADERS is 0 (don't) or 1 (do) display headers 
in the output\n");
-   fprintf(stdout, "   KB is the size of the send buffer in kilobytes, 
where the default is 32\n");
-
-   fprintf(stdout, "   -h shows this help message\n");
-}
-
 int main(int argc, char** argv)
 {
int socketFd;
@@ -188,9 +174,24 @@ int main(int argc, char** argv)
return 0;
 }
 
+void usage()
+{
+   fprintf(stdout, "usage: gpnetbench -p PORT -H HOST [-l SECONDS] [-t 
EXPERIMENT] [-f UNITS] [-P HEADERS] [-b KB] [-h]\n");
+   fprintf(stdout, "where\n");
+   fprintf(stdout, "   PORT is the port to connect to for the 
server\n");
+   fprintf(stdout, "   HOST is the hostname to connect to for the 
server\n");
+   fprintf(stdout, "   SECONDS is the number of seconds to sample the 
network, where the default is 60\n");
+   fprintf(stdout, "   EXPERIMENT is the experiment name to run, where 
the default is TCP_STREAM\n");
+   fprintf(stdout, "   UNITS is the output units, where the default is 
M megabytes\n");
+   fprintf(stdout, "   HEADERS is 0 (don't) or 1 (do) display headers 
in the output\n");
+   fprintf(stdout, "   KB is the size of the send buffer in kilobytes, 
where the default is 32\n");
+
+   fprintf(stdout, "   -h shows this help message\n");
+}
+
 void send_buffer(int fd, char* buffer, int bytes)
 {
-   size_t retval;
+   ssize_t retval;
 
while(bytes > 0)
{
@@ -235,3 +236,4 @@ void print_headers()
printf("   Size Time Throughput\n");
printf("n/a   n/a  bytessecs.MBytes/sec\n");
 }
+

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/69f3c902/tools/gpnetbench/gpnetbenchServer.c
--
diff --git a/tools/gpnetbench/gpnetbenchServer.c 
b/tools/gpnetbench/gpnetbenchServer.c
index abe1afd..e25c13b 100644
--- a/tools/gpnetbench/gpnetbenchServer.c
+++ b/tools/gpnetbench/gpnetbenchServer.c
@@ -31,10 +31,7 @@ char* receiveBuffer = NULL;
 
 void handleIncomingConnection(int fd);
 
-void usage()
-{
-   fprintf(stdout, "usage: gpnetbenchServer -p PORT [-h]\n");
-}
+void usage(void);
 
 int main(int argc, char** argv)
 {
@@ -149,6 +146,11 @@ int main(int argc, char** argv)
return 0;
 }
 
+void usage()
+{
+   fprintf(stdout, "usage: gpnetbenchServer -p PORT [-h]\n");
+}
+
 void handleIncomingConnection(int fd)
 {
ssize_t bytes;



[1/2] incubator-hawq git commit: HAWQ-622. Update libhdfs3 readme, clean old strings.

2016-07-17 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 5506a2281 -> 15323a501


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/ExceptionInternal.cpp
--
diff --git a/depends/libhdfs3/src/common/ExceptionInternal.cpp 
b/depends/libhdfs3/src/common/ExceptionInternal.cpp
index 8306dcd..b1b67a7 100644
--- a/depends/libhdfs3/src/common/ExceptionInternal.cpp
+++ b/depends/libhdfs3/src/common/ExceptionInternal.cpp
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Pivotal Inc.
- * All rights reserved.
- *
- * Author: Zhanwei Wang
- /
-/
  * 2014 -
  * open source under Apache License Version 2.0
  /

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/ExceptionInternal.h
--
diff --git a/depends/libhdfs3/src/common/ExceptionInternal.h 
b/depends/libhdfs3/src/common/ExceptionInternal.h
index 9d734af..fd791a0 100644
--- a/depends/libhdfs3/src/common/ExceptionInternal.h
+++ b/depends/libhdfs3/src/common/ExceptionInternal.h
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Pivotal Inc.
- * All rights reserved.
- *
- * Author: Zhanwei Wang
- /
-/
  * 2014 -
  * open source under Apache License Version 2.0
  /

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/FileWrapper.h
--
diff --git a/depends/libhdfs3/src/common/FileWrapper.h 
b/depends/libhdfs3/src/common/FileWrapper.h
index dc14a45..124db7f 100644
--- a/depends/libhdfs3/src/common/FileWrapper.h
+++ b/depends/libhdfs3/src/common/FileWrapper.h
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Pivotal Inc.
- * All rights reserved.
- *
- * Author: Zhanwei Wang
- /
-/
  * 2014 -
  * open source under Apache License Version 2.0
  /

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/Function.h
--
diff --git a/depends/libhdfs3/src/common/Function.h 
b/depends/libhdfs3/src/common/Function.h
index 9a50e9d..59522ec 100644
--- a/depends/libhdfs3/src/common/Function.h
+++ b/depends/libhdfs3/src/common/Function.h
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Pivotal Inc.
- * All rights reserved.
- *
- * Author: Zhanwei Wang
- /
-/
  * 2014 -
  * open source under Apache License Version 2.0
  /

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/HWCrc32c.cpp
--
diff --git a/depends/libhdfs3/src/common/HWCrc32c.cpp 
b/depends/libhdfs3/src/common/HWCrc32c.cpp
index 4e3f270..9573c3c 100644
--- a/depends/libhdfs3/src/common/HWCrc32c.cpp
+++ b/depends/libhdfs3/src/common/HWCrc32c.cpp
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Pivotal Inc.
- * All rights reserved.
- *
- * Author: Zhanwei Wang
- /
-/
  * 2014 -
  * open source under Apache License Version 2.0
  /

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/15323a50/depends/libhdfs3/src/common/HWCrc32c.h
--
diff --git a/depends/libhdfs3/src/common/HWCrc32c.h 
b/depends/libhdfs3/src/common/HWCrc32c.h
index 3f0582c..e218244 100644
--- a/depends/libhdfs3/src/common/HWCrc32c.h
+++ b/depends/libhdfs3/src/common/HWCrc32c.h
@@ -1,10 +1,4 @@
 /
- * Copyright (c) 2013 - 2014, Piv

[2/2] incubator-hawq git commit: HAWQ-622. Update libhdfs3 readme, clean old strings.

2016-07-17 Thread huor
HAWQ-622. Update libhdfs3 readme, clean old strings.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/15323a50
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/15323a50
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/15323a50

Branch: refs/heads/master
Commit: 15323a501392fc82717dc0b1399497670198e231
Parents: 5506a22
Author: xunzhang 
Authored: Fri Jul 15 18:09:29 2016 +0800
Committer: Ruilong Huo 
Committed: Mon Jul 18 09:55:34 2016 +0800

--
 depends/libhdfs3/README.md | 5 -
 depends/libhdfs3/debian/control| 3 ---
 depends/libhdfs3/debian/copyright  | 2 --
 depends/libhdfs3/mock/MockBufferedSocketReader.h   | 6 --
 depends/libhdfs3/mock/MockDatanode.h   | 6 --
 depends/libhdfs3/mock/MockFileSystemInter.h| 6 --
 depends/libhdfs3/mock/MockLeaseRenewer.h   | 6 --
 depends/libhdfs3/mock/MockNamenode.h   | 6 --
 depends/libhdfs3/mock/MockOperationCanceledCallback.h  | 6 --
 depends/libhdfs3/mock/MockPipeline.h   | 6 --
 depends/libhdfs3/mock/MockRpcChannel.h | 6 --
 depends/libhdfs3/mock/MockRpcClient.h  | 6 --
 depends/libhdfs3/mock/MockRpcRemoteCall.h  | 6 --
 depends/libhdfs3/mock/MockSockCall.h   | 6 --
 depends/libhdfs3/mock/MockSocket.h | 6 --
 depends/libhdfs3/mock/MockSystem.cpp   | 6 --
 depends/libhdfs3/mock/MockSystem.h | 6 --
 depends/libhdfs3/mock/NamenodeStub.h   | 6 --
 depends/libhdfs3/mock/PipelineStub.h   | 6 --
 depends/libhdfs3/mock/TestDatanodeStub.h   | 6 --
 depends/libhdfs3/mock/TestRpcChannelStub.h | 6 --
 depends/libhdfs3/mock/TestUtil.h   | 6 --
 depends/libhdfs3/rpms/libhdfs3.spec| 2 --
 depends/libhdfs3/src/client/BlockLocation.h| 6 --
 depends/libhdfs3/src/client/BlockReader.h  | 6 --
 depends/libhdfs3/src/client/DataTransferProtocol.h | 6 --
 depends/libhdfs3/src/client/DataTransferProtocolSender.cpp | 6 --
 depends/libhdfs3/src/client/DataTransferProtocolSender.h   | 6 --
 depends/libhdfs3/src/client/DirectoryIterator.cpp  | 6 --
 depends/libhdfs3/src/client/DirectoryIterator.h| 6 --
 depends/libhdfs3/src/client/FileStatus.h   | 6 --
 depends/libhdfs3/src/client/FileSystem.cpp | 6 --
 depends/libhdfs3/src/client/FileSystem.h   | 6 --
 depends/libhdfs3/src/client/FileSystemImpl.cpp | 6 --
 depends/libhdfs3/src/client/FileSystemImpl.h   | 6 --
 depends/libhdfs3/src/client/FileSystemInter.h  | 6 --
 depends/libhdfs3/src/client/FileSystemKey.cpp  | 6 --
 depends/libhdfs3/src/client/FileSystemKey.h| 6 --
 depends/libhdfs3/src/client/FileSystemStats.h  | 6 --
 depends/libhdfs3/src/client/Hdfs.cpp   | 6 --
 depends/libhdfs3/src/client/InputStream.cpp| 6 --
 depends/libhdfs3/src/client/InputStream.h  | 6 --
 depends/libhdfs3/src/client/InputStreamImpl.cpp| 6 --
 depends/libhdfs3/src/client/InputStreamImpl.h  | 6 --
 depends/libhdfs3/src/client/InputStreamInter.h | 6 --
 depends/libhdfs3/src/client/KerberosName.cpp   | 6 --
 depends/libhdfs3/src/client/KerberosName.h | 6 --
 depends/libhdfs3/src/client/LeaseRenewer.cpp   | 6 --
 depends/libhdfs3/src/client/LeaseRenewer.h | 6 --
 depends/libhdfs3/src/client/LocalBlockReader.cpp   | 6 --
 depends/libhdfs3/src/client/LocalBlockReader.h | 6 --
 depends/libhdfs3/src/client/OutputStream.cpp   | 6 --
 depends/libhdfs3/src/client/OutputStream.h | 6 --
 depends/libhdfs3/src/client/OutputStreamImpl.cpp   | 6 --
 depends/libhdfs3/src/client/OutputStreamImpl.h | 6 --
 depends/libhdfs3/src/client/OutputStreamInter.h| 6 --
 depends/libhdfs3/src/client/Packet.cpp | 6 --
 depends/libhdfs3/src/client/Packet.h   | 6 --
 depends/libhdfs3/src/client/PacketHeader.cpp   | 6 --
 depends/libhdfs3/src/client/PacketHeader.h | 6 --
 depends/libhdfs3/src/client/PacketPool.cpp | 6 --
 depends/libhdfs3/src/client/PacketPool.h

incubator-hawq git commit: HAWQ-808. Refactor feature test for external_oid with new framework.

2016-07-11 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 651ce19d1 -> bab04b57b


HAWQ-808. Refactor feature test for external_oid with new framework.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/bab04b57
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/bab04b57
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/bab04b57

Branch: refs/heads/master
Commit: bab04b57bfc4737b584595e79f61a7afec523c05
Parents: 651ce19
Author: xunzhang 
Authored: Tue Jul 12 14:14:08 2016 +0800
Committer: xunzhang 
Committed: Tue Jul 12 14:23:34 2016 +0800

--
 .../ExternalSource/ans/external_oid.ans.source  | 209 ++
 .../ExternalSource/data/multi_table.json|   1 +
 .../ExternalSource/data/single_table.json   |   1 +
 src/test/feature/ExternalSource/lib/Makefile|  44 ++
 src/test/feature/ExternalSource/lib/function.c  | 727 +++
 .../ExternalSource/sql/external_oid.sql.source  | 108 +++
 .../ExternalSource/test_external_oid.cpp|  35 +
 src/test/feature/Makefile   |   2 +
 src/test/regress/input/external_oid.source  | 118 ---
 src/test/regress/known_good_schedule|   1 -
 src/test/regress/output/external_oid.source | 211 --
 11 files changed, 1127 insertions(+), 330 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/bab04b57/src/test/feature/ExternalSource/ans/external_oid.ans.source
--
diff --git a/src/test/feature/ExternalSource/ans/external_oid.ans.source 
b/src/test/feature/ExternalSource/ans/external_oid.ans.source
new file mode 100644
index 000..3f3f3ae
--- /dev/null
+++ b/src/test/feature/ExternalSource/ans/external_oid.ans.source
@@ -0,0 +1,209 @@
+-- --
+-- test first external Oid initialization
+-- --
+-- start_matchsubs
+--
+-- # create a match/subs expression to handle ip addresses that change
+--
+-- m/.*inserted tuple to heap table pg_class \(oid \d+, relname table_xl\).*/
+-- s/oid \d+/oid SOME_OID/
+--
+-- m/.*deleted tuple oid=\d+ from heap table pg_class.*/
+-- s/oid=\d+/oid=OID/
+--
+-- end_matchsubs
+-- Create function that returns the first external Oid boundary
+CREATE OR REPLACE FUNCTION min_external_oid() RETURNS oid
+  AS '@SHARE_LIBRARY_PATH@', 'min_external_oid'
+  LANGUAGE C;
+CREATE FUNCTION
+-- Create function that returns the current external Oid
+CREATE OR REPLACE FUNCTION get_next_external_oid() RETURNS oid
+  AS '@SHARE_LIBRARY_PATH@', 'get_next_external_oid'
+  LANGUAGE C;
+CREATE FUNCTION
+-- Create function that sets the current external Oid
+CREATE OR REPLACE FUNCTION set_next_external_oid(ext_oid oid) RETURNS oid
+  AS '@SHARE_LIBRARY_PATH@', 'set_next_external_oid'
+  LANGUAGE C;
+CREATE FUNCTION
+-- Create function to insert and scan in-memory data to pg_class
+CREATE OR REPLACE FUNCTION load_json_data(filename text) RETURNS text
+  AS '@SHARE_LIBRARY_PATH@', 'load_json_data'
+  LANGUAGE C;
+CREATE FUNCTION
+-- Create function that inserts tuple with given Oid
+CREATE OR REPLACE FUNCTION caql_insert_into_heap_pg_class(relid oid, tblname 
text) RETURNS text
+  AS '@SHARE_LIBRARY_PATH@', 'caql_insert_into_heap_pg_class'
+  LANGUAGE C;
+CREATE FUNCTION
+-- Create function that inserts tuple with given Oid
+CREATE OR REPLACE FUNCTION caql_delete_from_heap_pg_class(relid oid) RETURNS 
text
+  AS '@SHARE_LIBRARY_PATH@', 'caql_delete_from_heap_pg_class'
+  LANGUAGE C;
+CREATE FUNCTION
+-- --
+-- Test hcat table external oid initialization
+-- --
+-- Boundary should be at FirstExternalObjectId
+--SELECT min_external_oid();
+-- NextExternalObjectId is uninitialized
+SELECT get_next_external_oid();
+ get_next_external_oid
+---
+ 0
+(1 row)
+
+SELECT load_json_data('@abs_datadir@/single_table.json');
+  load_json_data
+--
+ default.mytable
+(1 row)
+
+SELECT get_next_external_oid()::bigint - min_external_oid()::bigint;
+ ?column?
+--
+0
+(1 row)
+
+BEGIN TRANSACTION;
+BEGIN
+SELECT get_next_external_oid()::bigint - min_external_oid()::bigint;
+ ?column?
+--
+0
+(1 row)
+
+-- load default.mytable -> +3 oids
+-- 1 oid for namespace 'default', 1 oid for relation 'mytable', 1 oid for 
reltype
+SELECT load_json_data('@abs_datadir@/single_table.json');
+  load_json_data
+--
+ default.mytable
+(1 row)
+
+SELECT get_next_external_oid()::bigint - min_external_oid()::bigint;
+ ?column? 
+--
+3
+(1 row)
+
+-- load db1.ht1, db2.ht1, db2.ht2 -> +8 oids
+-- oids: db1, ht1(db1), db2, ht1(db2), ht2, reltype(ht1, ht1

incubator-hawq git commit: HAWQ-806. Add feature test for subplan with new framework

2016-07-11 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 3618fe16c -> 651ce19d1


HAWQ-806. Add feature test for subplan with new framework


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/651ce19d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/651ce19d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/651ce19d

Branch: refs/heads/master
Commit: 651ce19d14e3473e6c1ddf42dcf3d4a6bad0daec
Parents: 3618fe1
Author: xunzhang 
Authored: Thu Jun 16 21:56:56 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 12 10:36:37 2016 +0800

--
 src/test/feature/planner/ans/subplan.ans  | 374 +
 src/test/feature/planner/sql/subplan.sql  | 140 +
 src/test/feature/planner/test_subplan.cpp |  15 +
 src/test/regress/expected/subplan.out | 337 --
 src/test/regress/known_good_schedule  |   1 -
 src/test/regress/sql/subplan.sql  | 140 -
 6 files changed, 529 insertions(+), 478 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/651ce19d/src/test/feature/planner/ans/subplan.ans
--
diff --git a/src/test/feature/planner/ans/subplan.ans 
b/src/test/feature/planner/ans/subplan.ans
new file mode 100644
index 000..7335b90
--- /dev/null
+++ b/src/test/feature/planner/ans/subplan.ans
@@ -0,0 +1,374 @@
+-- start_ignore
+SET SEARCH_PATH=TestSubplan_TestSubplanAll;
+SET
+-- end_ignore
+--
+-- These tests are intended to cover GPSQL-1260.  Which means queries
+-- whose plan contains combinations of InitPlan and SubPlan nodes.
+--
+-- Derived from //cdbfast/main/subquery/mpp8334/
+-- SUITE: hash-vs-nl-not-in
+-- start_ignore
+drop schema if exists subplan_tests cascade;
+psql:/tmp/TestSubplan_TestSubplanAll.sql:12: NOTICE:  schema "subplan_tests" 
does not exist, skipping
+DROP SCHEMA
+-- end_ignore
+create schema subplan_tests;
+CREATE SCHEMA
+set search_path=subplan_tests;
+SET
+create table t1(a int, b int) distributed by (a);
+CREATE TABLE
+insert into t1 select i, i+10 from generate_series(-5,5)i;
+INSERT 0 11
+create table i3(a int not null, b int not null) distributed by (a);
+CREATE TABLE
+insert into i3 select i-1, i from generate_series(1,5)i;
+INSERT 0 5
+create table i4(a int, b int) distributed by (a);
+CREATE TABLE
+insert into i4 values(null,null);
+INSERT 0 1
+insert into i4 select i, i-10 from generate_series(-5,0)i;
+INSERT 0 6
+DROP LANGUAGE IF EXISTS plpythonu CASCADE;
+psql:/tmp/TestSubplan_TestSubplanAll.sql:27: NOTICE:  language "plpythonu" 
does not exist, skipping
+DROP LANGUAGE
+CREATE LANGUAGE plpythonu;
+CREATE LANGUAGE
+create or replace function twice(int) returns int as $$
+   select 2 * $1;
+$$ language sql;
+CREATE FUNCTION
+create or replace function half(int) returns int as $$
+begin
+   return $1 / 2;
+end;
+$$ language plpgsql;
+CREATE FUNCTION
+create or replace function thrice(x int) returns int as $$
+if (x is None):
+return 0
+else:
+return x * 3
+$$ language plpythonu;
+CREATE FUNCTION
+select t1.* from t1 where (t1.a, t1.b) not in
+   (select twice(i3.a), i3.b from i3 union select i4.a, thrice(i4.b) from i4);
+ a  | b  
++
+ -2 |  8
+  3 | 13
+  5 | 15
+  4 | 14
+ -1 |  9
+  0 | 10
+  2 | 12
+ -5 |  5
+ -3 |  7
+ -4 |  6
+  1 | 11
+(11 rows)
+
+select t1.* from t1 where (t1.a, half(t1.b)) not in
+   (select twice(i3.a), i3.b from i3 union all select i4.a, i4.b from i4);
+ a | b 
+---+---
+(0 rows)
+
+select t1.a, half(t1.b) from t1 where (t1.a, t1.b) not in
+   (select 1, thrice(2) union select 3, 4);
+ a  | half 
++--
+  4 |7
+ -5 |2
+ -3 |3
+ -4 |3
+  1 |5
+ -1 |4
+  0 |5
+  2 |6
+ -2 |4
+  3 |6
+  5 |7
+(11 rows)
+
+select t1.* from t1 where (half(t1.a), t1.b) not in
+   (select thrice(i3.a), i3.b from i3 union select i4.a, i4.b from i4);
+ a | b 
+---+---
+(0 rows)
+
+select t1.* from t1 where (t1.a, t1.b) not in
+   (select i3.a, half(i3.b) from i3 union all
+  select i4.a, thrice(i4.b) from i4);
+ a  | b  
++
+  4 | 14
+ -1 |  9
+  0 | 10
+  2 | 12
+ -5 |  5
+ -3 |  7
+ -2 |  8
+  3 | 13
+  5 | 15
+ -4 |  6
+  1 | 11
+(11 rows)
+
+-- Two SubPlan nodes
+select t1.* from t1 where (t1.a, t1.b) not in (select i3.a, i3.b from i3) or
+   (t1.a, t1.b) not in (select i4.a, i4.b from i4);
+ a  | b  
++
+ -2 |  8
+  3 | 13
+  5 | 15
+ -1 |  9
+  0 | 10
+  2 | 12
+ -4 |  6
+  1 | 11
+ -5 |  5
+ -3 |  7
+  4 | 14
+(11 rows)
+
+-- Two SubPlan nodes
+select t1.* from t1 where
+   (t1.a, twice(t1.b)) not in (select thrice(i3.a), i3.b from i3) or
+  (t1.a, half(t1.b)) not in (select i4.a, i4.b from i4);
+ a  | b  
++
+ -4 |  6
+  1 | 11
+  4 | 14
+ -2 |  8

[2/2] incubator-hawq git commit: HAWQ-805. Fix version mismatch and wrong error info with orca ON.

2016-07-04 Thread huor
HAWQ-805. Fix version mismatch and wrong error info with orca ON.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/8b79e10f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/8b79e10f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/8b79e10f

Branch: refs/heads/master
Commit: 8b79e10fdb952ad1fd22d377689d434092400a0f
Parents: 17f6982
Author: xunzhang 
Authored: Tue Jul 5 14:18:45 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 5 14:45:03 2016 +0800

--
 .../ExternalSource/ans/exttab1.ans.source   |  17 +-
 .../ExternalSource/sql/exttab1.sql.source   |   1 -
 src/test/regress/input/exttab1.source   | 568 
 src/test/regress/known_good_schedule|   3 +-
 src/test/regress/output/exttab1.source  | 902 ---
 .../regress/output/exttab1_optimizer.source | 901 --
 src/test/regress/parallel_schedule  |   1 -
 7 files changed, 9 insertions(+), 2384 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b79e10f/src/test/feature/ExternalSource/ans/exttab1.ans.source
--
diff --git a/src/test/feature/ExternalSource/ans/exttab1.ans.source 
b/src/test/feature/ExternalSource/ans/exttab1.ans.source
index e303438..2624abe 100644
--- a/src/test/feature/ExternalSource/ans/exttab1.ans.source
+++ b/src/test/feature/ExternalSource/ans/exttab1.ans.source
@@ -93,7 +93,7 @@ select * from gpfdist_start;
 select * from gpfdist_status;
 x
 -
- Okay, gpfdist version "2.0.0.0 build dev" is running on @hostname@:7070.
+ Okay, gpfdist version "2.0.1.0 build dev" is running on @hostname@:7070.
 (1 row)
 
 -- end_ignore
@@ -182,7 +182,7 @@ SELECT * FROM EXT_REGION as r, EXT_NATION as n WHERE 
n.N_REGIONKEY = r.R_REGIONK
 select * from gpfdist_status;
 x
 -
- Okay, gpfdist version "2.0.0.0 build dev" is running on @hostname@:7070.
+ Okay, gpfdist version "2.0.1.0 build dev" is running on @hostname@:7070.
 (1 row)
 
 select * from gpfdist_stop;
@@ -249,7 +249,7 @@ select * from gpfdist_csv_start;
 select * from gpfdist_status;
 x
 -
- Okay, gpfdist version "2.0.0.0 build dev" is running on @hostname@:7070.
+ Okay, gpfdist version "2.0.1.0 build dev" is running on @hostname@:7070.
 (1 row)
 
 -- end_ignore
@@ -633,8 +633,7 @@ psql:/tmp/TestExternalTable_TestExternalTableAll.sql:411: 
ERROR:  the ON segment
 -- SELECT from WET (negative)
 --
 select * from wet_pos1;
-psql:/tmp/TestExternalTable_TestExternalTableAll.sql:416: ERROR:  it is not 
possible to read from a WRITABLE external table.
-HINT:  Create the table as READABLE instead
+psql:/tmp/TestExternalTable_TestExternalTableAll.sql:416: ERROR:  External 
scan error: It is not possible to read from a WRITABLE external table. Create 
the table as READABLE instead. (COptTasks.cpp:1617)
 --
 -- WET: export some data with INSERT SELECT, INSERT and COPY.
 --
@@ -659,7 +658,7 @@ INSERT 0 0
 select * from gpfdist_status;
 x
 -
- Okay, gpfdist version "2.0.0.0 build dev" is running on @hostname@:7070.
+ Okay, gpfdist version "2.0.1.0 build dev" is running on @hostname@:7070.
 (1 row)
 
 select * from gpfdist_stop;
@@ -681,7 +680,7 @@ select * from gpfdist_status;
 -- get an error for missing gpfdist
 --
 select count(*) from ext_whois;
-psql:/tmp/TestExternalTable_TestExternalTableAll.sql:449: ERROR:  connection 
with gpfdist failed for gpfdist://@hostname@:7070/whois.csv. effective url: 
http://127.0.0.1:7070/whois.csv. error code = 61 (Connection refused)  (seg4 
@hostname@:4 pid=35904)
+psql:/tmp/TestExternalTable_TestExternalTableAll.sql:448: ERROR:  connection 
with gpfdist failed for gpfdist://@hostname@:7070/whois.csv. effective url: 
http://127.0.0.1:7070/whois.csv. error code = 61 (Connection refused)  (seg4 
@hostname@:4 pid=35904)
 --
 -- test CREATE EXTERNAL TABLE privileges
 --
@@ -788,7 +787,7 @@ select * from gpfdist_start;
 select * from gpfdist_status;
 x
 -
- Okay, gpfdist version "2.0.0.0 build dev" is running on @hostname@:7070.
+ Okay, gpfdist version "2.0.1.0 build dev" is running on @hostname@:7070.
 (1 row)
 
 -- end_ignore
@@ -861,7 +860,7 @@ DROP TABLE
 select * from gpfdist_status;
   

[1/2] incubator-hawq git commit: HAWQ-805. Fix version mismatch and wrong error info with orca ON.

2016-07-04 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 17f698289 -> 8b79e10fd


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8b79e10f/src/test/regress/output/exttab1_optimizer.source
--
diff --git a/src/test/regress/output/exttab1_optimizer.source 
b/src/test/regress/output/exttab1_optimizer.source
deleted file mode 100755
index 3f03143..000
--- a/src/test/regress/output/exttab1_optimizer.source
+++ /dev/null
@@ -1,901 +0,0 @@
---
--- external tables 1 - short and simple functional tests. The full set of tests
--- exists in cdbunit.
---
--- start_matchsubs
---
--- # replace return code in error message (platform specific)
---
--- m/ERROR\:\s+external table .* command ended with .* not found/
--- s/nosuchcommand\:\s*(command)? not found/nosuchcommand\: NOT FOUND/
---
--- m/ERROR\:\s+external table .* command ended with .*No such file.*/
--- s/nosuchfile\.txt\:\s*No such file (or directory)?/nosuchfile\.txt\: NO 
SUCH FILE/
--- m/ERROR\:\s+external table .* command ended with .*No such file.*/i
--- s/cat\: (cannot open)? nosuchfile\.txt/cat\: nosuchfile\.txt/
---
--- # remove line number - redhat
--- m/ERROR\:\s+external table .* command ended with .*NOT FOUND.*/i
--- s/\s+line \d+\://
--- # remove cannot open - solaris
--- m/ERROR\:\s+external table .* command ended with .*cat\: cannot open.*/i
--- s/cat\: cannot open (.*)$/cat\: $1\: NO SUCH FILE/
---
--- end_matchsubs
-SET gp_foreign_data_access = true;
-set optimizer_disable_missing_stats_collection = on;
-CREATE TABLE REG_REGION (R_REGIONKEY INT, R_NAME CHAR(25), R_COMMENT 
VARCHAR(152)) DISTRIBUTED BY (R_REGIONKEY);
--- --
--- 'file' protocol - (only CREATE, don't SELECT - won't work on distributed 
installation)
--- --
-CREATE EXTERNAL TABLE EXT_NATION  ( N_NATIONKEY  INTEGER ,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-location ('file://@hostname@@abs_srcdir@/data/nation.tbl' )
-FORMAT 'text' (delimiter '|');
-ERROR:  the file protocol for external tables is deprecated
-HINT:  use the gpfdist protocol or COPY FROM instead
-CREATE EXTERNAL TABLE EXT_REGION  (LIKE REG_REGION)
-location ('file://@hostname@@abs_srcdir@/data/region.tbl' )
-FORMAT 'text' (delimiter '|');
-ERROR:  the file protocol for external tables is deprecated
-HINT:  use the gpfdist protocol or COPY FROM instead
--- start_ignore
--- --
--- check platform
--- --
-drop external web table if exists check_ps;
-NOTICE:  table "check_ps" does not exist, skipping
-CREATE EXTERNAL WEB TABLE check_ps (x text)
-execute E'( (ps -ef || ps -aux) | grep gpfdist | grep -v grep)'
-on SEGMENT 0
-format 'text';
-drop external web table if exists check_env;
-NOTICE:  table "check_env" does not exist, skipping
-CREATE EXTERNAL WEB TABLE check_env (x text)
-execute E'( env | sort)'
-on SEGMENT 0
-format 'text';
-select * from check_ps;
-ERROR:  external table check_ps command ended with error.  (seg0 slice1 
@hostname@:5 pid=64819)
-DETAIL:  Command: execute:( (ps -ef || ps -aux) | grep gpfdist | grep -v grep)
-select * from check_env;
-                                                               x
-----------------------------------------------------------------
- CLICOLOR=1
- 
DYLD_LIBRARY_PATH=/Users/@gpcurusername@/greenplum-db-devel/lib:/Users/@gpcurusername@/greenplum-db-devel/ext/python/lib:/opt/gcc_infrastructure/lib:/Users/@gpcurusername@/greenplum-perfmon-web-4.0.1.0/lib:/Users/@gpcurusername@/greenplum-db-devel/lib:/Users/@gpcurusername@/greenplum-db-devel/ext/python/lib:
- GPHOME=/Users/@gpcurusername@/greenplum-db-devel
- GPPERFMONHOME=/Users/@gpcurusername@/greenplum-perfmon-web-4.0.1.0
- GPROOT=/Users/@gpcurusername@
- GP_CID=0
- GP_DATABASE=regression
- GP_DATE=20110816
- GP_HADOOP_CONN_JARDIR=lib//hadoop
- GP_HADOOP_CONN_VERSION=CE_1.0.0.0
- GP_MASTER_HOST=127.0.0.1
- GP_MASTER_PORT=5432
- GP_SEGMENT_COUNT=2
- GP_SEGMENT_ID=0
- GP_SEG_DATADIR=/Use

incubator-hawq git commit: HAWQ-814. Enable parallel compilation of shared library and remove create_function_1 from installcheck

2016-07-04 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 831de3a6d -> 8f32e417e


HAWQ-814. Enable parallel compilation of shared library and remove 
create_function_1 from installcheck


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/8f32e417
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/8f32e417
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/8f32e417

Branch: refs/heads/master
Commit: 8f32e417e0b58c2c2815b671b188af6f5d72fd5e
Parents: 831de3a
Author: Ruilong Huo 
Authored: Tue Jul 5 12:27:11 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 5 12:27:11 2016 +0800

--
 src/test/feature/UDF/lib/Makefile   |  4 +-
 src/test/regress/input/create_function_1.source | 79 ---
 src/test/regress/known_good_schedule|  1 -
 .../regress/output/create_function_1.source | 82 
 4 files changed, 2 insertions(+), 164 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f32e417/src/test/feature/UDF/lib/Makefile
--
diff --git a/src/test/feature/UDF/lib/Makefile 
b/src/test/feature/UDF/lib/Makefile
index ccbf99b..f7c1879 100755
--- a/src/test/feature/UDF/lib/Makefile
+++ b/src/test/feature/UDF/lib/Makefile
@@ -27,12 +27,12 @@ TARGET = function.so
 
 RM = rm -rf 
 
-all: $(OBJS) $(TARGET)
+all: $(TARGET)
 
 $(OBJS): $(PROG)
$(CXX) $(CXXFLAGS) $(CPPFLAGS) -c -o $(OBJS) $(PROG)
 
-$(TARGET):
+$(TARGET): $(OBJS)
 ifeq ($(OS),Darwin)
$(CXX) $(CXXFLAGS) -bundle $(OBJS) -bundle_loader $(POSTGRES) 
$(LDFLAGS) -o $@
 else

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f32e417/src/test/regress/input/create_function_1.source
--
diff --git a/src/test/regress/input/create_function_1.source 
b/src/test/regress/input/create_function_1.source
deleted file mode 100755
index 71c0fdd..000
--- a/src/test/regress/input/create_function_1.source
+++ /dev/null
@@ -1,79 +0,0 @@
---
--- CREATE_FUNCTION_1
---
-
-CREATE FUNCTION widget_in(cstring)
-   RETURNS widget
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C IMMUTABLE STRICT;
-
-CREATE FUNCTION widget_out(widget)
-   RETURNS cstring
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C IMMUTABLE STRICT;
-
-CREATE FUNCTION int44in(cstring)
-   RETURNS city_budget
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C IMMUTABLE STRICT;
-
-CREATE FUNCTION int44out(city_budget)
-   RETURNS cstring
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C IMMUTABLE STRICT;
-
-CREATE FUNCTION check_primary_key ()
-   RETURNS trigger
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C;
-
-CREATE FUNCTION check_foreign_key ()
-   RETURNS trigger
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C;
-
-CREATE FUNCTION autoinc ()
-   RETURNS trigger
-   AS '@abs_builddir@/regress@DLSUFFIX@'
-   LANGUAGE C;
-
-CREATE FUNCTION funny_dup17 ()
-RETURNS trigger
-AS '@abs_builddir@/regress@DLSUFFIX@'
-LANGUAGE C;
-
-CREATE FUNCTION ttdummy ()
-RETURNS trigger
-AS '@abs_builddir@/regress@DLSUFFIX@'
-LANGUAGE C;
-
-CREATE FUNCTION set_ttdummy (int4)
-RETURNS int4
-AS '@abs_builddir@/regress@DLSUFFIX@'
-LANGUAGE C STRICT;
-
--- Things that shouldn't work:
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
-AS 'SELECT ''not an integer'';';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
-AS 'not even SQL';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
-AS 'SELECT 1, 2, 3;';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
-AS 'SELECT $2;';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
-AS 'a', 'b';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
-AS 'nosuchfile';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
-AS '@abs_builddir@/regress@DLSUFFIX@', 'nosuchsymbol';
-
-CREATE FUNCTION test1 (int) RETURNS int LANGUAGE internal
-AS 'nosuch';

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f32e417/src/test/regress/known_good_schedule
--
diff --git a/src/test/regress/known_good_schedule 
b/src/test/regress/known_good_schedule
index bb14fa2..9e393ec 100755
--- a/src/test/regress/known_good_schedule
+++ b/src/test/regress/known_good_schedule
@@ -57,7 +57,6 @@ ignore: oidjoins
 ignore: opr_sanity
 ignore: geometry
 ignore: horology
-test: create_function_1
 test: subplan
 ignore: create_type
 test: create_table_test

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/8f32e417/src/test/regress/output/create_

[3/3] incubator-hawq git commit: HAWQ-814. Enhance user-defined function by migrating create_function_1 of UDF from installcheck to new feature test framework

2016-07-04 Thread huor
HAWQ-814. Enhance user-defined function by migrating create_function_1 of UDF 
from installcheck to new feature test framework


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/49fd529a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/49fd529a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/49fd529a

Branch: refs/heads/master
Commit: 49fd529aa01d54199465639060c01577bca06981
Parents: c66cfba
Author: Ruilong Huo 
Authored: Tue Jun 28 09:49:05 2016 +0800
Committer: Ruilong Huo 
Committed: Tue Jul 5 09:40:03 2016 +0800

--
 src/test/feature/Makefile   |   12 +-
 src/test/feature/UDF/TestUDF.cpp|   63 +
 .../feature/UDF/ans/function_basics.ans.orca| 1088 +++
 .../feature/UDF/ans/function_basics.ans.planner | 1076 +++
 .../UDF/ans/function_creation.ans.source|   96 ++
 src/test/feature/UDF/ans/function_extension.ans |  183 +++
 .../feature/UDF/ans/function_set_returning.ans  |  287 
 src/test/feature/UDF/lib/Makefile   |   43 +
 src/test/feature/UDF/lib/function.c | 1245 ++
 src/test/feature/UDF/sql/function_basics.sql|  439 ++
 .../UDF/sql/function_creation.sql.source|   79 ++
 src/test/feature/UDF/sql/function_extension.sql |  123 ++
 .../feature/UDF/sql/function_set_returning.sql  |   93 ++
 src/test/feature/udf/TestUDF.cpp|   32 -
 src/test/feature/udf/ans/function_basics.ans| 1088 ---
 src/test/feature/udf/ans/function_extension.ans |  183 ---
 .../feature/udf/ans/function_set_returning.ans  |  287 
 src/test/feature/udf/sql/function_basics.sql|  439 --
 src/test/feature/udf/sql/function_extension.sql |  123 --
 .../feature/udf/sql/function_set_returning.sql  |   93 --
 20 files changed, 4824 insertions(+), 2248 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/49fd529a/src/test/feature/Makefile
--
diff --git a/src/test/feature/Makefile b/src/test/feature/Makefile
index 82a3dc3..e97716c 100644
--- a/src/test/feature/Makefile
+++ b/src/test/feature/Makefile
@@ -15,16 +15,22 @@ override LDFLAGS += -L/usr/local/lib -L/usr/lib 
-L$(abs_top_srcdir)/src/test/fea
 PROG = test_main.cpp $(wildcard */*.cpp)
 RM = rm -rf
 
-.PHONY: all distclean clean doc
+.PHONY: all sharelib sharelibclean distclean clean doc
 
-all:
+all: sharelib
$(MAKE) -C lib all
$(CXX) $(CPPFLAGS) $(CXXFLAGS) $(PROG) $(LDFLAGS) $(LIBS) -o 
feature-test
 
+sharelib:
+   cd UDF/lib || exit 1; $(MAKE) || exit 2; $(MAKE) clean || exit 3
+
+sharelibclean:
+   cd UDF/lib || exit 1; $(RM) *.o *.so || exit 2
+
 doc:
doxygen doxygen_template
 
-clean distclean:
+clean distclean: sharelibclean
$(RM) feature-test
$(RM) feature-test.dSYM
$(RM) doc

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/49fd529a/src/test/feature/UDF/TestUDF.cpp
--
diff --git a/src/test/feature/UDF/TestUDF.cpp b/src/test/feature/UDF/TestUDF.cpp
new file mode 100755
index 000..ce7f158
--- /dev/null
+++ b/src/test/feature/UDF/TestUDF.cpp
@@ -0,0 +1,63 @@
+#include "gtest/gtest.h"
+
+#include "lib/sql_util.h"
+#include "lib/file_replace.h"
+
+
+class TestUDF: public ::testing::Test
+{
+   public:
+   TestUDF() {}
+   ~TestUDF() {}
+};
+
+TEST_F(TestUDF, TestUDFBasics)
+{
+   hawq::test::SQLUtility util;
+   if (util.getGUCValue("optimizer") == "on")
+   {
+   util.execSQLFile("UDF/sql/function_basics.sql",
+"UDF/ans/function_basics.ans.orca");
+   }
+   else
+   {
+   util.execSQLFile("UDF/sql/function_basics.sql",
+"UDF/ans/function_basics.ans.planner");
+   }
+}
+
+TEST_F(TestUDF, TestUDFCreation)
+{
+   // preprocess source files to get sql/ans files
+   hawq::test::SQLUtility util;
+   std::string d_feature_test_root(util.getTestRootPath());
+   std::string f_sql_tpl(d_feature_test_root + 
"/UDF/sql/function_creation.sql.source");
+   std::string f_ans_tpl(d_feature_test_root + 
"/UDF/ans/function_creation.ans.source");
+   std::string f_sql(d_feature_test_root + 
"/UDF/sql/function_creation.sql");
+   std::string f_ans(d_feature_test_root + 
"/UDF/ans/function_creation.ans");
+
+   hawq::test::FileReplace frep;
+   std::unordered_map strs_src_dst;
+   strs_src_dst["@SHARE_LIBRARY_PATH@"] = d_feature_test_root + 
"/UDF/lib/function.so";
+
+   frep.replace(f_sql_tpl, f_sql, strs_src_dst);
+   frep.replace(f_ans_tpl, f_ans, strs_src_dst);
+

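Several commits in this thread migrate installcheck cases to the new feature test framework shown above. The following is only an illustrative sketch of such a test, assuming hypothetical "MyFeature" sql/ans files; just SQLUtility::execSQLFile and getGUCValue are taken from the diffs above, and the rest follows the same pattern as TestUDF.cpp:

#include "gtest/gtest.h"

#include "lib/sql_util.h"

// Hypothetical feature test, modeled on TestUDF.cpp above.
class TestMyFeature : public ::testing::Test
{
   public:
      TestMyFeature() {}
      ~TestMyFeature() {}
};

TEST_F(TestMyFeature, TestMyFeatureBasics)
{
   hawq::test::SQLUtility util;
   // Each case runs in a schema named <TestClass>_<TestCase>, which is why
   // the .ans files in this thread start with SET SEARCH_PATH=... lines.
   // When the plans differ, keep one answer file per optimizer setting.
   if (util.getGUCValue("optimizer") == "on")
   {
      util.execSQLFile("MyFeature/sql/basics.sql",
                       "MyFeature/ans/basics.ans.orca");
   }
   else
   {
      util.execSQLFile("MyFeature/sql/basics.sql",
                       "MyFeature/ans/basics.ans.planner");
   }
}
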
[2/3] incubator-hawq git commit: HAWQ-814. Enhance user-defined function by migrating create_function_1 of UDF from installcheck to new feature test framework

2016-07-04 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/49fd529a/src/test/feature/UDF/lib/function.c
--
diff --git a/src/test/feature/UDF/lib/function.c 
b/src/test/feature/UDF/lib/function.c
new file mode 100755
index 000..f346855
--- /dev/null
+++ b/src/test/feature/UDF/lib/function.c
@@ -0,0 +1,1245 @@
+#include "postgres.h"
+#include "funcapi.h"
+#include "tablefuncapi.h"
+
+#include 
+#include 
+
+#include "access/transam.h"
+#include "access/xact.h"
+#include "catalog/pg_type.h"
+#include "cdb/memquota.h"
+#include "commands/sequence.h"
+#include "commands/trigger.h"
+#include "executor/executor.h"
+#include "executor/spi.h"
+#include "parser/parse_expr.h"
+#include "libpq/auth.h"
+#include "libpq/hba.h"
+#include "utils/builtins.h"
+#include "utils/geo_decls.h"
+#include "utils/lsyscache.h"
+#include "utils/resscheduler.h"
+
+
+#define LDELIM '('
+#define RDELIM ')'
+#define NARGS  3
+#define TTDUMMY_INFINITY 99
+
+
+extern Datum int44in(PG_FUNCTION_ARGS);
+extern Datum int44out(PG_FUNCTION_ARGS);
+extern Datum check_primary_key(PG_FUNCTION_ARGS);
+extern Datum check_foreign_key(PG_FUNCTION_ARGS);
+extern Datum autoinc(PG_FUNCTION_ARGS);
+extern Datum funny_dup17(PG_FUNCTION_ARGS);
+extern Datum ttdummy(PG_FUNCTION_ARGS);
+extern Datum set_ttdummy(PG_FUNCTION_ARGS);
+
+
+#ifdef PG_MODULE_MAGIC
+PG_MODULE_MAGIC;
+#endif
+
+
+
+/* widget_in and widget_out */
+typedef struct
+{
+  Point   center;
+  doubleradius;
+} WIDGET;
+
+WIDGET *widget_in(char *str);
+char *widget_out(WIDGET * widget);
+
+WIDGET *widget_in(char *str)
+{
+   char *p, *coord[NARGS], buf2[1000];
+   int i;
+   WIDGET *result;
+
+   if (str == NULL)
+   {
+   return NULL;
+   }
+
+   for (i = 0, p = str; *p && i < NARGS && *p != RDELIM; p++)
+   {
+   if (*p == ',' || (*p == LDELIM && !i))
+   {
+   coord[i++] = p + 1;
+   }
+   }
+
+   if (i < NARGS - 1)
+   {
+   return NULL;
+   }
+
+   result = (WIDGET *) palloc(sizeof(WIDGET));
+   result->center.x = atof(coord[0]);
+   result->center.y = atof(coord[1]);
+   result->radius = atof(coord[2]);
+
+   snprintf(buf2, sizeof(buf2), "widget_in: read (%f, %f, %f)\n",
+result->center.x, result->center.y, result->radius);
+
+   return result;
+}
+
+
+char *widget_out(WIDGET * widget)
+{
+   char *result;
+
+   if (widget == NULL)
+   {
+   return NULL;
+   }
+
+   result = (char *) palloc(60);
+
+   sprintf(result, "(%g,%g,%g)",
+   widget->center.x, widget->center.y, widget->radius);
+
+   return result;
+}
+
+
+
+/* int44in and int44out */
+/*
+ * Type int44 has no real-world use, but the function tests use it.
+ * It's a four-element vector of int4's.
+ */
+
+/*
+ * int44in: converts "num num ..." to internal form
+ * Note: Fills any missing positions with zeroes.
+ */
+PG_FUNCTION_INFO_V1(int44in);
+Datum int44in(PG_FUNCTION_ARGS)
+{
+   char *input_string = PG_GETARG_CSTRING(0);
+   int32 *result = (int32 *) palloc(4 * sizeof(int32));
+   int i;
+
+   i = sscanf(input_string,
+  "%d, %d, %d, %d",
+  &result[0],
+  &result[1],
+  &result[2],
+  &result[3]);
+
+   while (i < 4)
+   {
+   result[i++] = 0;
+   }
+
+   PG_RETURN_POINTER(result);
+}
+
+/*
+ * int44out: converts internal form to "num num ..."
+ */
+PG_FUNCTION_INFO_V1(int44out);
+Datum int44out(PG_FUNCTION_ARGS)
+{
+   int32 *an_array = (int32 *) PG_GETARG_POINTER(0);
+   /* Allow 14 digits sign */
+   char *result = (char *) palloc(16 * 4);
+   int i;
+   char *walk;
+
+   walk = result;
+   for (i = 0; i < 4; i++)
+   {
+   pg_ltoa(an_array[i], walk);
+   while (*++walk != '\0')
+   ;
+   *walk++ = ' ';
+   }
+   *--walk = '\0';
+   PG_RETURN_CSTRING(result);
+}
+
+
+
+/* check_primary_key, check_foreign_key and helper function find_plan */
+typedef struct
+{
+   char   *ident;
+   int nplans;
+   void  **splan;
+}  EPlan;
+
+static EPlan *FPlans = NULL;
+static int nFPlans = 0;
+static EPlan *PPlans = NULL;
+static int nPPlans = 0;
+
+static EPlan *find_plan(char *ident, EPlan ** eplan, int *nplans);
+
+/*
+ * check_primary_key () -- check that key in tuple being inserted/updated
+ *  references existing tuple in "primary" table.
+ * Though it's called without args You have to specify referenced
+ * table/keys while creating trigger:  key field names in triggered table,
+ * referenced table name, referenced key field names:
+ * EXECUTE PROCEDURE
+ * check_primary_key ('Fkey1', 'Fkey2', 'Ptable', 'Pkey1', 'Pkey2').
+ */
+PG_FUNCTION_IN

[1/3] incubator-hawq git commit: HAWQ-814. Enhance user-defined function by migrating create_function_1 of UDF from installcheck to new feature test framework

2016-07-04 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c66cfbad1 -> 49fd529aa


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/49fd529a/src/test/feature/udf/ans/function_set_returning.ans
--
diff --git a/src/test/feature/udf/ans/function_set_returning.ans 
b/src/test/feature/udf/ans/function_set_returning.ans
deleted file mode 100755
index 4544a2d..000
--- a/src/test/feature/udf/ans/function_set_returning.ans
+++ /dev/null
@@ -1,287 +0,0 @@
--- start_ignore
-SET SEARCH_PATH=TestUDF_TestUDFSetReturning;
-SET
--- end_ignore
-DROP LANGUAGE IF EXISTS plpythonu CASCADE;
-DROP LANGUAGE
-CREATE LANGUAGE plpythonu;
-CREATE LANGUAGE
-CREATE TABLE foo2(fooid int, f2 int);
-CREATE TABLE
-INSERT INTO foo2 VALUES(1, 11);
-INSERT 0 1
-INSERT INTO foo2 VALUES(2, 22);
-INSERT 0 1
-INSERT INTO foo2 VALUES(1, 111);
-INSERT 0 1
-CREATE FUNCTION foot(int) returns setof foo2 as 'SELECT * FROM foo2 WHERE 
fooid = $1;' LANGUAGE SQL;
-CREATE FUNCTION
-select foot.fooid, foot.f2 from foot(sin(pi()/2)::int) ORDER BY 1,2;
- fooid | f2  
+-
- 1 |  11
- 1 | 111
-(2 rows)
-
-CREATE TABLE foo (fooid int, foosubid int, fooname text);
-CREATE TABLE
-INSERT INTO foo VALUES(1,1,'Joe');
-INSERT 0 1
-INSERT INTO foo VALUES(1,2,'Ed');
-INSERT 0 1
-INSERT INTO foo VALUES(2,1,'Mary');
-INSERT 0 1
-CREATE FUNCTION getfoo(int) RETURNS setof int AS 'SELECT fooid FROM foo WHERE 
fooid = $1;' LANGUAGE SQL;
-CREATE FUNCTION
-SELECT * FROM getfoo(1) AS t1;
- t1 
-
-  1
-  1
-(2 rows)
-
-CREATE VIEW vw_getfoo AS SELECT * FROM getfoo(1);
-CREATE VIEW
-SELECT * FROM vw_getfoo;
- getfoo 
-
-  1
-  1
-(2 rows)
-
-DROP VIEW vw_getfoo;
-DROP VIEW
-DROP FUNCTION getfoo(int);
-DROP FUNCTION
-DROP FUNCTION foot(int);
-DROP FUNCTION
-DROP TABLE foo2;
-DROP TABLE
-DROP TABLE foo;
-DROP TABLE
--- setof as a paramater --
-CREATE TYPE numtype as (i int, j int);
-CREATE TYPE
-CREATE FUNCTION g_numtype(x setof numtype) RETURNS setof numtype AS $$ select 
$1; $$ LANGUAGE SQL;
-CREATE FUNCTION
-DROP FUNCTION g_numtype(x setof numtype);
-DROP FUNCTION
-DROP TYPE numtype;
-DROP TYPE
---
--- Set functions samples from Madlib
---
-create function combination(s text) returns setof text[] as $$
-x = s.split(',')
-
-def subset(myset, N):
-   left = []
-   right = []
-   for i in range(0, len(myset)):
-  if ((1 << i) & N) > 0:
- left.append(myset[i])
-  else:
- right.append(myset[i])
-   return (', '.join(left), ', '.join(right))
-
-for i in range(1, (1 << len(x)) - 2):
-   yield subset(x, i)
-$$ language plpythonu strict;
-CREATE FUNCTION
-select x[1] || ' => ' || x[2] from combination('a,b,c,d') x;
-   ?column?   
---
- a => b, c, d
- b => a, c, d
- a, b => c, d
- c => a, b, d
- a, c => b, d
- b, c => a, d
- a, b, c => d
- d => a, b, c
- a, d => b, c
- b, d => a, c
- a, b, d => c
- c, d => a, b
- a, c, d => b
-(13 rows)
-
-CREATE TABLE rules(rule text) distributed by (rule);
-CREATE TABLE
-insert into rules values('a,b,c');
-INSERT 0 1
-insert into rules values('d,e');
-INSERT 0 1
-insert into rules values('f,g,h,i,j');
-INSERT 0 1
-insert into rules values('k,l,m');
-INSERT 0 1
-SELECT rule, combination(rule) from rules order by 1,2;
-   rule|combination 
+
- a,b,c | {a,"b, c"}
- a,b,c | {"a, b",c}
- a,b,c | {"a, c",b}
- a,b,c | {b,"a, c"}
- a,b,c | {c,"a, b"}
- d,e   | {d,e}
- f,g,h,i,j | {f,"g, h, i, j"}
- f,g,h,i,j | {"f, g","h, i, j"}
- f,g,h,i,j | {"f, g, h","i, j"}
- f,g,h,i,j | {"f, g, h, i",j}
- f,g,h,i,j | {"f, g, h, j",i}
- f,g,h,i,j | {"f, g, i","h, j"}
- f,g,h,i,j | {"f, g, i, j",h}
- f,g,h,i,j | {"f, g, j","h, i"}
- f,g,h,i,j | {"f, h","g, i, j"}
- f,g,h,i,j | {"f, h, i","g, j"}
- f,g,h,i,j | {"f, h, i, j",g}
- f,g,h,i,j | {"f, h, j","g, i"}
- f,g,h,i,j | {"f, i","g, h, j"}
- f,g,h,i,j | {"f, i, j","g, h"}
- f,g,h,i,j | {"f, j","g, h, i"}
- f,g,h,i,j | {g,"f, h, i, j"}
- f,g,h,i,j | {"g, h","f, i, j"}
- f,g,h,i,j | {"g, h, i","f, j"}
- f,g,h,i,j | {"g, h, j","f, i"}
- f,g,h,i,j | {"g, i","f, h, j"}
- f,g,h,i,j | {"g, i, j","f, h"}
- f,g,h,i,j | {"g, j","f, h, i"}
- f,g,h,i,j | {h,"f, g, i, j"}
- f,g,h,i,j | {"h, i","f, g, j"}
- f,g,h,i,j | {"h, i, j","f, g"}
- f,g,h,i,j | {"h, j","f, g, i"}
- f,g,h,i,j | {i,"f, g, h, j"}
- f,g,h,i,j | {"i, j","f, g, h"}
- f,g,h,i,j | {j,"f, g, h, i"}
- k,l,m | {k,"l, m"}
- k,l,m | {"k, l",m}
- k,l,m | {"k, m",l}
- k,l,m | {l,"k, m"}
- k,l,m | {m,"k, l"}
-(40 rows)
-
-DROP TABLE IF EXISTS foo;
-psql:/tmp/TestUDF_TestUDFSetReturning.sql:69: NOTICE:  table "foo" does not 
exist, skipping
-DROP TABLE
-CREATE TABLE foo AS SELECT rule, combination(rule) from rules distributed by 
(rule);
-SELECT 40
--- UDT as argument/return type of set returning UDF
-CREATE TYPE r_type as (a int, b text);
-CREATE TYPE
-CREATE FUNCTION f1(x r_type) returns setof text as $$ SELECT $1.b from 
generate

incubator-hawq git commit: HAWQ-597. Update README.md

2016-07-03 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 3334d001b -> 745bdca4c


HAWQ-597. Update README.md


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/745bdca4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/745bdca4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/745bdca4

Branch: refs/heads/master
Commit: 745bdca4cb8ed27db319f05899ae1554cdc29a29
Parents: 3334d00
Author: xunzhang 
Authored: Mon Jul 4 11:22:20 2016 +0800
Committer: xunzhang 
Committed: Mon Jul 4 11:22:20 2016 +0800

--
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/745bdca4/README.md
--
diff --git a/README.md b/README.md
index c3ad91c..9f0123d 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-![HAWQ](http://hawq.incubator.apache.org/images/logo-hawq.png)
+![HAWQ](http://hawq.incubator.apache.org/images/logo-hawq.png) 
[![https://travis-ci.org/apache/incubator-hawq.png](https://travis-ci.org/apache/incubator-hawq.png)](https://travis-ci.org/apache/incubator-hawq)
 
 [Website](http://hawq.incubator.apache.org/) |
 [Wiki](https://cwiki.apache.org/confluence/display/HAWQ/Apache+HAWQ+Home) |



[3/3] incubator-hawq git commit: HAWQ-805. Add feature test for exttab1 with new framework

2016-07-03 Thread huor
HAWQ-805. Add feature test for exttab1 with new framework


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/a2fa04ff
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/a2fa04ff
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/a2fa04ff

Branch: refs/heads/master
Commit: a2fa04ff66f3e14861e390cafb74b885e7a0b991
Parents: b4993b2
Author: xunzhang 
Authored: Thu Jun 16 19:42:04 2016 +0800
Committer: Ruilong Huo 
Committed: Mon Jul 4 10:38:00 2016 +0800

--
 .../ExternalSource/ans/exttab1.ans.source   |  887 
 .../ExternalSource/data/missing_fields1.data|5 +
 .../ExternalSource/data/missing_fields2.data|6 +
 .../feature/ExternalSource/data/mpp12839_1.data |2 +
 .../feature/ExternalSource/data/mpp12839_2.data |3 +
 .../feature/ExternalSource/data/mpp12839_3.data |2 +
 .../feature/ExternalSource/data/mpp17980.data   |   18 +
 .../feature/ExternalSource/data/mpp6698.data|3 +
 src/test/feature/ExternalSource/data/region.tbl |5 +
 src/test/feature/ExternalSource/data/whois.csv  | 1303 ++
 .../feature/ExternalSource/sql/errortbl.source  |   93 ++
 .../ExternalSource/sql/exttab1.sql.source   |  551 
 .../ExternalSource/sql_source/errortbl.source   |   93 --
 src/test/feature/ExternalSource/test_exttab.cpp |   35 +
 14 files changed, 2913 insertions(+), 93 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a2fa04ff/src/test/feature/ExternalSource/ans/exttab1.ans.source
--
diff --git a/src/test/feature/ExternalSource/ans/exttab1.ans.source 
b/src/test/feature/ExternalSource/ans/exttab1.ans.source
new file mode 100644
index 000..e303438
--- /dev/null
+++ b/src/test/feature/ExternalSource/ans/exttab1.ans.source
@@ -0,0 +1,887 @@
+-- start_ignore
+SET SEARCH_PATH=TestExternalTable_TestExternalTableAll;
+SET
+-- end_ignore
+--
+-- external tables 1 - short and simple functional tests. The full set of tests
+-- exists in cdbunit.
+--
+-- start_matchsubs
+--
+-- # replace return code in error message (platform specific)
+--
+-- m/ERROR\:\s+external table .* command ended with .* not found/
+-- s/nosuchcommand\:\s*(command)? not found/nosuchcommand\: NOT FOUND/
+--
+-- m/ERROR\:\s+external table .* command ended with .*No such file.*/
+-- s/nosuchfile\.txt\:\s*No such file (or directory)?/nosuchfile\.txt\: NO 
SUCH FILE/
+-- m/ERROR\:\s+external table .* command ended with .*No such file.*/i
+-- s/cat\: (cannot open)? nosuchfile\.txt/cat\: nosuchfile\.txt/
+--
+-- # remove line number - redhat
+-- m/ERROR\:\s+external table .* command ended with .*NOT FOUND.*/i
+-- s/\s+line \d+\://
+-- # remove cannot open - solaris
+-- m/ERROR\:\s+external table .* command ended with .*cat\: cannot open.*/i
+-- s/cat\: cannot open (.*)$/cat\: $1\: NO SUCH FILE/
+--
+-- end_matchsubs
+SET gp_foreign_data_access = true;
+SET
+set optimizer_disable_missing_stats_collection = on;
+SET
+CREATE TABLE REG_REGION (R_REGIONKEY INT, R_NAME CHAR(25), R_COMMENT 
VARCHAR(152)) DISTRIBUTED BY (R_REGIONKEY);
+CREATE TABLE
+-- start_ignore
+-- --
+-- check platform
+-- --
+drop external web table if exists check_echo;
+psql:/tmp/TestExternalTable_TestExternalTableAll.sql:36: NOTICE:  table 
"check_echo" does not exist, skipping
+DROP EXTERNAL TABLE
+CREATE EXTERNAL WEB TABLE check_echo (x text)
+execute E'(echo gpfdist)'
+on SEGMENT 0
+format 'text';
+CREATE EXTERNAL TABLE
+select * from check_echo;
+x
+-
+ gpfdist
+(1 row)
+
+-- end_ignore
+-- --
+-- 'gpfdist' protocol
+-- --
+CREATE EXTERNAL WEB TABLE gpfdist_status (x text)
+execute E'( python $GPHOME/bin/lib/gppinggpfdist.py @hostname@:7070 2>&1 || 
echo) '
+on SEGMENT 0
+FORMAT 'text' (delimiter '|');
+CREATE EXTERNAL TABLE
+CREATE EXTERNAL WEB TABLE gpfdist_start (x text)
+execute E'((@gpwhich_gpfdist@ -p 7070 -d @abs_srcdir@/data  /dev/null 2>&1 &); sleep 2; echo "starting...") '
+on SEGMENT 0
+FORMAT 'text' (delimiter '|');
+CREATE EXTERNAL TABLE
+CREATE EXTERNAL WEB TABLE gpfdist_stop (x text)
+execute E'(/bin/pkill gpfdist || killall gpfdist) > /dev/null 2>&1; echo 
"stopping..."'
+on SEGMENT 0
+FORMAT 'text' (delimiter '|');
+CREATE EXTERNAL TABLE
+-- start_ignore
+select * from gpfdist_stop;
+  x
+-
+ stopping...
+(1 row)
+
+select * from gpfdist_status;
+  x
+--
+ Error: gpfdist is not running (reason: socket error)
+ Exit: 1
+
+(3 rows)
+
+select * from gpfdist_start;
+  x
+-
+ starting...
+(1 row)
+
+selec

[2/3] incubator-hawq git commit: HAWQ-805. Add feature test for exttab1 with new framework

2016-07-03 Thread huor
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a2fa04ff/src/test/feature/ExternalSource/data/whois.csv
--
diff --git a/src/test/feature/ExternalSource/data/whois.csv 
b/src/test/feature/ExternalSource/data/whois.csv
new file mode 100755
index 000..a54ee19
--- /dev/null
+++ b/src/test/feature/ExternalSource/data/whois.csv
@@ -0,0 +1,1303 @@
+source_line_number,domain,subdomain,tld,ip_address,ip_address_int,reverse_ip_address,reverse_ip_address_int,registrar,registrar_url,whois_server,harvest_date,creation_date,updated_date,expiration_date,rank,status,update_status,nameserver1,nameserver1_domain,nameserver1_subdomain,nameserver1_tld,nameserver2,nameserver2_domain,nameserver2_subdomain,nameserver2_tld,nameserver3,nameserver3_domain,nameserver3_subdomain,nameserver3_tld,nameserver4,nameserver4_domain,nameserver4_subdomain,nameserver4_tld,nameserver5,nameserver5_domain,nameserver5_subdomain,nameserver5_tld,registrant_name,registrant_org,registrant_email,registrant_email_domain,registrant_email_subdomain,registrant_email_tld,registrant_phone,registrant_fax,registrant_street_address,registrant_address1,registrant_address2,registrant_address3,registrant_city,registrant_region,registrant_postal_code,registrant_country,tech_name,tech_org,tech_email,tech_email_domain,tech_email_subdomain,tech_email_tld,tech_phone,tech_fax,tech_st
 
reet_address,tech_address1,tech_address2,tech_address3,tech_city,tech_region,tech_postal_code,tech_country,admin_name,admin_org,admin_email,admin_email_domain,admin_email_subdomain,admin_email_tld,admin_phone,admin_fax,admin_street_address,admin_street1,admin_street2,admin_street3,admin_city,admin_region,admin_postal_code,admin_country,path,raw_record
+2,00111.net,net,net2006-06-16 19:33:12.11421,2006-02-17,2006-02-18 
01:19:53,2007-02-17,3,,,ns1.cnolnic.net,cnolnic.net,net,net,ns2.cnolnic.net,cnolnic.net,net,net,wu
 guishan,wu 
guishan,wua...@163.com,163.com,com,com,+86.05962196589,,"xianchen|zhangzhou, 
Fujian, cn 363000",xianchen,,,zhangzhou,Fujian,363000,cn,wu guishan,wu 
guishan,wua...@163.com,163.com,com,com,+86.05962196589,"jian, cn 
363000|+86.05962196589","xianchen|zhangzhou, Fujian, cn 
363000",xianchen,,,zhangzhou,Fujian,363000,cn,wu guishan,wu 
guishan,wua...@163.com,163.com,com,com,+86.05962196589,"jian, cn 
363000|+86.05962196589","xianchen|zhangzhou, Fujian, cn 
363000",xianchen,,,zhangzhou,Fujian,363000,cn,,"The Data in Web Commerce 
Communications Limited (""WEBCC"")'s WHOIS database 
+is provided by WEBCC for information purposes, and to assist in obtaining 
+information about or related to a domain name registration record. WEBCC 
+does not guarantee its accuracy. By submitting a WHOIS query, you agree 
+that you will use this Data only for lawful purposes and that, under no 
+circumstances will you use this Data to:
+
+(1) allow, enable, or otherwise support the transmission of mass unsolicited, 
+commercial advertising or solicitations via e-mail (spam).
+(2) enable high volume, automated, electronic processes that apply to WEBCC 
+(or its systems).
+
+The compilation, repackaging, dissemination or other use of this Data is 
+expressly prohibited without the prior written consent of WEBCC. WEBCC 
+reserves the right to terminate your access to the WEBCC WHOIS database in 
+its sole discretion, including without limitation, for excessive querying 
+of the WHOIS database or for failure to otherwise abide by this policy. 
+WEBCC reserves the right to modify these terms at any time.
+
+
+Domain: 00111.NET
+Status: Protected
+
+DNS:
+   ns1.cnolnic.net
+   ns2.cnolnic.net
+
+Created: 2006-02-17
+Expires: 2007-02-17
+Last Modified: 2006-02-18 01:19:53
+
+Registrant Contact:
+   wu guishan
+   wu guishan (wua...@163.com)
+   xianchen 
+   zhangzhou, Fujian, cn 363000
+   P: +86.05962196589   F: +86.05962196589
+
+Administrative Contact:
+   wu guishan
+   wu guishan (wua...@163.com)
+   xianchen 
+   zhangzhou, Fujian, cn 363000
+   P: +86.05962196589   F: +86.05962196589
+   
+Technical Contact:
+   wu guishan
+   wu guishan (wua...@163.com)
+   xianchen 
+   zhangzhou, Fujian, cn 363000
+   P: +86.05962196589   F: +86.05962196589
+
+Billing Contact:
+   wu guishan
+   wu guishan (wua...@163.com)
+   xianchen 
+   zhangzhou, Fujian, cn 363000
+   P: +86.05962196589   F: +86.05962196589"
+62,007china.net,net,net2006-06-16 
19:33:12.22845,2005-09-05,,2006-09-05,3,,,ns1.4everdns.com,4everdns.com,com,com,ns2.4everdns.com,4everdns.com,com,com,YIN
 JIANPING,,shiguan...@163.com,163.com,com,com,,,Xige street No 87 Zhongshan 
north road|Xuzhou Jiangsu 221000|cn,Xige street No 87 Zhongshan north 
road,,,Xuzhou Jiangsu,,221000,cn,jianping 
yin,,shiguan...@163.com,163.com,com,com,86-0516-782,86-0516-782,xige 
street No 87 zhongshan north road|

[1/3] incubator-hawq git commit: HAWQ-805. Add feature test for exttab1 with new framework

2016-07-03 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master b4993b2be -> a2fa04ff6


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a2fa04ff/src/test/feature/ExternalSource/sql_source/errortbl.source
--
diff --git a/src/test/feature/ExternalSource/sql_source/errortbl.source 
b/src/test/feature/ExternalSource/sql_source/errortbl.source
deleted file mode 100644
index aeaaff4..000
--- a/src/test/feature/ExternalSource/sql_source/errortbl.source
+++ /dev/null
@@ -1,93 +0,0 @@
--- --
--- 'gpfdist' protocol
--- --
-
-CREATE EXTERNAL WEB TABLE gpfdist_status (x text)
-execute E'( python $GPHOME/bin/lib/gppinggpfdist.py @hostname@:7070 2>&1 || 
echo) '
-on SEGMENT 0
-FORMAT 'text' (delimiter '|');
-
-CREATE EXTERNAL WEB TABLE gpfdist_start (x text)
-execute E'((@gpwhich_gpfdist@ -p 7070 -d @abs_srcdir@/data  /dev/null 2>&1 &); sleep 2; echo "starting...") '
-on SEGMENT 0
-FORMAT 'text' (delimiter '|');
-
-CREATE EXTERNAL WEB TABLE gpfdist_stop (x text)
-execute E'(/bin/pkill gpfdist || killall gpfdist) > /dev/null 2>&1; echo 
"stopping..."'
-on SEGMENT 0
-FORMAT 'text' (delimiter '|');
--- start_ignore
-select * from gpfdist_stop;
-select * from gpfdist_status;
-select * from gpfdist_start;
-select * from gpfdist_status;
--- end_ignore
-
--- readable external table with error table
-
-CREATE EXTERNAL TABLE EXT_NATION1 ( N_NATIONKEY  INTEGER ,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-location ('gpfdist://@hostname@:7070/nation_error50.tbl')
-FORMAT 'text' (delimiter '|')
-LOG ERRORS INTO EXT_NATION_ERROR1 SEGMENT REJECT LIMIT 51;
-
-CREATE EXTERNAL TABLE EXT_NATION2 ( N_NATIONKEY  INTEGER ,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-location ('gpfdist://@hostname@:7070/nation_error50.tbl')
-FORMAT 'text' (delimiter '|')
-LOG ERRORS INTO EXT_NATION_ERROR2 SEGMENT REJECT LIMIT 50;
-
-CREATE EXTERNAL TABLE EXT_NATION3 ( N_NATIONKEY  INTEGER ,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-location ('gpfdist://@hostname@:7070/nation.tbl')
-FORMAT 'text' (delimiter '|')
-LOG ERRORS INTO EXT_NATION_ERROR3 SEGMENT REJECT LIMIT 50;
-
--- use existing error table
-CREATE EXTERNAL TABLE EXT_NATION_WITH_EXIST_ERROR_TABLE ( N_NATIONKEY  INTEGER 
,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-location ('gpfdist://@hostname@:7070/nation_error50.tbl')
-FORMAT 'text' (delimiter '|')
-LOG ERRORS INTO EXT_NATION_ERROR1 SEGMENT REJECT LIMIT 51;
-
-select * from EXT_NATION1;
-select count(*) from EXT_NATION_ERROR1; -- should be 50
-select * from EXT_NATION_WITH_EXIST_ERROR_TABLE;
-select count(*) from EXT_NATION_ERROR1; -- should be 100
-select * from EXT_NATION2; -- should fail
-select count(*) from EXT_NATION_ERROR2; -- should be empty
-select * from EXT_NATION3;
-select count(*) from EXT_NATION_ERROR3; -- should be empty
-
-truncate EXT_NATION_ERROR1;
-select * from EXT_NATION1 as x, EXT_NATION3 as y where x.n_nationkey = 
y.n_nationkey;
-select count(*) from EXT_NATION_ERROR1; -- should be 50
-
-select * from EXT_NATION1 as x, EXT_NATION1 as y where x.n_nationkey = 
y.n_nationkey; --should fail on self join
-select * from EXT_NATION1 as x, EXT_NATION_WITH_EXIST_ERROR_TABLE as y where 
x.n_nationkey = y.n_nationkey; --should fail with the same error table
-
--- should fail on writable external table
-CREATE WRITABLE EXTERNAL TABLE EXT_NATION_WRITABLE ( N_NATIONKEY  INTEGER ,
-N_NAME   CHAR(25) ,
-N_REGIONKEY  INTEGER ,
-N_COMMENTVARCHAR(152))
-LOCATION ('gpfdist://@hostname@:7070/nation_error50.tbl') 
-FORMAT 'text' (delimiter '|')
-LOG ERRORS INTO EXT_NATION_ERROR_WRITABLE SEGMENT REJECT LIMIT 5;
-
--- start_ignore
-select * from gpfdist_stop;
-select * from gpfdist_status;
--- end_ignore
-
-drop external table gpfdist_status;
-drop external table gpfdist_start;
-drop external table gpfdist_stop;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a2fa04ff/src/test/feature/ExternalSource/test_exttab.cpp
--
diff --git a/src/test/feature/ExternalSource/test_exttab.cpp 
b/src/test/feature/ExternalSource/test_exttab.cpp
new file mode 100644
index 000..274c0c9
--- /dev/null
+++ b/src/test/feature/ExternalSource/test_exttab.cpp
@@ -0,0 +1,35 @@
+#include "gtest/gtest.h"
+
+#include "lib/sql_util.h"
+#include "lib/file_r

incubator-hawq git commit: HAWQ-867. Replace the git-submodule mechanism with git-clone

2016-06-30 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 4f39857f1 -> 497ae5db9


HAWQ-867. Replace the git-submodule mechanism with git-clone


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/497ae5db
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/497ae5db
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/497ae5db

Branch: refs/heads/master
Commit: 497ae5db996094150e475659e06eea929e209841
Parents: 4f39857
Author: Paul Guo 
Authored: Thu Jun 30 18:34:27 2016 +0800
Committer: Paul Guo 
Committed: Fri Jul 1 11:20:00 2016 +0800

--
 .gitmodules | 15 -
 contrib/Makefile| 20 --
 contrib/pgcrypto.patch  | 13 
 depends/thirdparty/.gitignore   | 10 +--
 depends/thirdparty/gp-xerces|  1 -
 depends/thirdparty/gp-xerces.commit |  1 +
 .../gp-xerces/git_keep_empty_directory.readme   |  8 +++
 depends/thirdparty/gporca   |  1 -
 depends/thirdparty/gporca.commit|  1 +
 .../gporca/git_keep_empty_directory.readme  |  8 +++
 depends/thirdparty/gpos |  1 -
 depends/thirdparty/gpos.commit  |  1 +
 .../gpos/git_keep_empty_directory.readme|  8 +++
 depends/thirdparty/postgres |  1 -
 depends/thirdparty/postgres.commit  |  1 +
 .../postgres/git_keep_empty_directory.readme|  8 +++
 src/.gitignore  |  1 -
 src/Makefile.global.in  |  9 ---
 src/backend/Makefile| 68 
 src/pl/.gitignore   |  2 +
 src/pl/Makefile | 21 --
 src/pl/plr  |  1 -
 src/pl/plr.commit   |  1 +
 src/pl/plr/git_keep_empty_directory.readme  |  8 +++
 24 files changed, 138 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/497ae5db/.gitmodules
--
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 6e33770..000
--- a/.gitmodules
+++ /dev/null
@@ -1,15 +0,0 @@
-[submodule "depends/thirdparty/gp-xerces"]
-   path = depends/thirdparty/gp-xerces
-   url = https://github.com/greenplum-db/gp-xerces.git
-[submodule "depends/thirdparty/gpos"]
-   path = depends/thirdparty/gpos
-   url = https://github.com/greenplum-db/gpos.git
-[submodule "depends/thirdparty/gporca"]
-   path = depends/thirdparty/gporca
-   url = https://github.com/greenplum-db/gporca.git
-[submodule "src/pl/plr"]
-   path = src/pl/plr
-   url = https://github.com/jconway/plr.git
-[submodule "depends/thirdparty/postgres"]
-   path = depends/thirdparty/postgres
-   url = https://github.com/postgres/postgres.git

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/497ae5db/contrib/Makefile
--
diff --git a/contrib/Makefile b/contrib/Makefile
index a3c1804..1d4939f 100644
--- a/contrib/Makefile
+++ b/contrib/Makefile
@@ -14,10 +14,22 @@ WANTED_DIRS = \
 ifeq ($(with_pgcrypto), yes)
 WANTED_DIRS += pgcrypto
 
-# fixme: depend on pgcrypto.patch also.
-pgcrypto_prepare: pg_prepare
-   if [ ! -e $(abs_top_builddir)/$(subdir)/pgcrypto_prepare_timestamp ]; 
then \
-   (cd 
$(abs_top_srcdir)/depends/thirdparty/postgres/contrib/pgcrypto; git apply 
$(abs_top_srcdir)/$(subdir)/pgcrypto.patch) && \
+pgcrypto_commit := $(shell cat 
$(abs_top_srcdir)/depends/thirdparty/postgres.commit)
+
+pgcrypto_prepare_timestamp: pgcrypto.patch 
$(abs_top_srcdir)/depends/thirdparty/postgres.commit
+   rm -f pgcrypto_prepare_timestamp
+
+# This includes an ugly hack which remove two added files after "git-reset".
+# I do this because "git apply" fails if the two files have existed.
+pgcrypto_prepare: pgcrypto_prepare_timestamp
+   if [ ! -f $(abs_top_builddir)/$(subdir)/pgcrypto_prepare_timestamp ]; 
then \
+   [ "x$(pgcrypto_commit)" != "x" ] || exit 1; \
+   cd $(abs_top_srcdir)/depends/thirdparty; mkdir -p postgres; cd 
postgres; \
+   rm -f git_keep_empty_directory.readme; \
+   [ ! -d .git ] && git clone 
https://github.com/postgres/postgres.git .; \
+   git reset --hard $(pgcrypto_commit) || (echo Now fetch from 
upstream and try again; git fetch origin master; git reset --hard 
$(pgcrypto_commit)) || exit 2; \
+   rm -f contrib/pgcrypto/pgcrypto.sql.in 
contrib/pgcrypto/uninstall_pgcrypto.sql; \
+   git apply $(abs_

incubator-hawq git commit: HAWQ-869. Add regression test for the issue where fewer tuples are inserted in prepared statement

2016-06-28 Thread huor
Repository: incubator-hawq
Updated Branches:
  refs/heads/master 706882c20 -> 01a44739b


HAWQ-869. Add regression test for the issue where fewer tuples are inserted in prepared statement


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/01a44739
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/01a44739
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/01a44739

Branch: refs/heads/master
Commit: 01a44739b0a701fc8b81ed91fdda52e618ef6b37
Parents: 706882c
Author: Ruilong Huo 
Authored: Mon Jun 27 16:04:46 2016 +0800
Committer: Ruilong Huo 
Committed: Wed Jun 29 10:28:13 2016 +0800

--
 .../PreparedStatement/TestPreparedStatement.cpp |  38 +++
 .../feature/PreparedStatement/ans/insert.ans| 266 +++
 .../feature/PreparedStatement/ans/proba.ans | 230 
 .../PreparedStatement/ans/proba_execute.ans | 228 
 .../feature/PreparedStatement/sql/insert.sql| 130 +
 .../feature/PreparedStatement/sql/proba.sql | 147 ++
 .../PreparedStatement/sql/proba_execute.sql | 145 ++
 7 files changed, 1184 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/01a44739/src/test/feature/PreparedStatement/TestPreparedStatement.cpp
--
diff --git a/src/test/feature/PreparedStatement/TestPreparedStatement.cpp 
b/src/test/feature/PreparedStatement/TestPreparedStatement.cpp
new file mode 100755
index 000..a8684ca
--- /dev/null
+++ b/src/test/feature/PreparedStatement/TestPreparedStatement.cpp
@@ -0,0 +1,38 @@
+#include "gtest/gtest.h"
+
+#include "lib/sql_util.h"
+
+
+class TestPreparedStatement: public ::testing::Test
+{
+   public:
+   TestPreparedStatement() {}
+   ~TestPreparedStatement() {}
+};
+
+// HAWQ-800: https://issues.apache.org/jira/browse/HAWQ-800
+// HAWQ-835: https://issues.apache.org/jira/browse/HAWQ-835
+TEST_F(TestPreparedStatement, TestPreparedStatementPrepare)
+{
+   hawq::test::SQLUtility util;
+   util.execSQLFile("PreparedStatement/sql/proba.sql",
+"PreparedStatement/ans/proba.ans");
+}
+
+// HAWQ-800: https://issues.apache.org/jira/browse/HAWQ-800
+// HAWQ-835: https://issues.apache.org/jira/browse/HAWQ-835
+TEST_F(TestPreparedStatement, TestPreparedStatementExecute)
+{
+   hawq::test::SQLUtility util;
+   util.execSQLFile("PreparedStatement/sql/proba_execute.sql",
+"PreparedStatement/ans/proba_execute.ans");
+}
+
+// HAWQ-800: https://issues.apache.org/jira/browse/HAWQ-800
+// HAWQ-835: https://issues.apache.org/jira/browse/HAWQ-835
+TEST_F(TestPreparedStatement, TestPreparedStatementInsert)
+{
+   hawq::test::SQLUtility util;
+   util.execSQLFile("PreparedStatement/sql/insert.sql",
+"PreparedStatement/ans/insert.ans");
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/01a44739/src/test/feature/PreparedStatement/ans/insert.ans
--
diff --git a/src/test/feature/PreparedStatement/ans/insert.ans 
b/src/test/feature/PreparedStatement/ans/insert.ans
new file mode 100644
index 000..32b2f4f
--- /dev/null
+++ b/src/test/feature/PreparedStatement/ans/insert.ans
@@ -0,0 +1,266 @@
+-- start_ignore
+SET SEARCH_PATH=TestPreparedStatement_TestPreparedStatementInsert;
+SET
+-- end_ignore
+-- start_ignore
+drop function if exists f1();
+psql:/tmp/TestPreparedStatement_TestPreparedStatementInsert.sql:5: NOTICE:  
function f1() does not exist, skipping
+DROP FUNCTION
+drop function if exists f2();
+psql:/tmp/TestPreparedStatement_TestPreparedStatementInsert.sql:6: NOTICE:  
function f2() does not exist, skipping
+DROP FUNCTION
+drop table if exists t1;
+psql:/tmp/TestPreparedStatement_TestPreparedStatementInsert.sql:7: NOTICE:  
table "t1" does not exist, skipping
+DROP TABLE
+drop table if exists t2;
+psql:/tmp/TestPreparedStatement_TestPreparedStatementInsert.sql:8: NOTICE:  
table "t2" does not exist, skipping
+DROP TABLE
+-- end_ignore
+create table t1 (id int);
+CREATE TABLE
+insert into t1 values (1);
+INSERT 0 1
+create table t2 (id int);
+CREATE TABLE
+CREATE OR REPLACE FUNCTION f1()
+  RETURNS text
+  LANGUAGE plpgsql
+AS
+$body$
+DECLARE
+l_rec record;
+l_item record;
+   l_count integer;
+BEGIN
+
+   RAISE NOTICE '--- Initial content of t1: begin ---';
+   SELECT count(*) INTO l_count FROM t1;
+   RAISE NOTICE '--- # tuple: %', l_count;
+RAISE NOTICE 'id';
+FOR l_item IN SELECT * FROM t1 LOOP
+RAISE NOTICE '%', quote_ident(l_item.id);
+END LOOP;
+   RAISE NOTICE '--- Initial content of t1: end ---';
+
+FOR l_rec IN ( SELECT generate_series(1, 
