danny0405 commented on a change in pull request #11935:
URL: https://github.com/apache/flink/pull/11935#discussion_r422761906
##########
File path:
flink-table/flink-sql-parser-hive/src/main/codegen/includes/parserImpls.ftl
##########
@@ -218,4 +218,549 @@ SqlNode TableOption() :
{
return new SqlTableOption(key, value, getPos());
}
-}
\ No newline at end of file
+}
+
+
+SqlCreate SqlCreateTemporary(Span s, boolean replace) :
+{
+ boolean isTemporary = false;
+ SqlCreate create;
+}
+{
+ [ <TEMPORARY> {isTemporary = true;} ]
+
+ create = SqlCreateTable(s, isTemporary)
+ {
+ return create;
+ }
+}
+
+/**
+* Parse a "Show Tables" metadata query command.
+*/
+SqlShowTables SqlShowTables() :
+{
+}
+{
+ <SHOW> <TABLES>
+ {
+ return new SqlShowTables(getPos());
+ }
+}
+
+/**
+ * Here we add Rich in className to distinguish from calcite's original
SqlDescribeTable.
+ */
+SqlRichDescribeTable SqlRichDescribeTable() :
+{
+ SqlIdentifier tableName;
+ SqlParserPos pos;
+ boolean isExtended = false;
+}
+{
+ <DESCRIBE> { pos = getPos();}
+ [ LOOKAHEAD(2) ( <EXTENDED> | <FORMATTED> ) { isExtended = true;} ]
+ tableName = CompoundIdentifier()
+ {
+ return new SqlRichDescribeTable(pos, tableName, isExtended);
+ }
+}
+
+SqlCreate SqlCreateTable(Span s, boolean isTemporary) :
+{
+ final SqlParserPos startPos = s.pos();
+ SqlIdentifier tableName;
+ SqlNodeList primaryKeyList = SqlNodeList.EMPTY;
+ List<SqlNodeList> uniqueKeysList = new ArrayList<SqlNodeList>();
+ SqlNodeList columnList = SqlNodeList.EMPTY;
+ SqlCharStringLiteral comment = null;
+
+ SqlNodeList propertyList;
+ SqlNodeList partitionColumns = SqlNodeList.EMPTY;
+ SqlParserPos pos = startPos;
+ boolean isExternal = false;
+ HiveTableRowFormat rowFormat = null;
+ HiveTableStoredAs storedAs = null;
+ SqlCharStringLiteral location = null;
+ HiveTableCreationContext ctx = new HiveTableCreationContext();
+}
+{
+ [ <EXTERNAL> { isExternal = true; } ]
+ <TABLE> { propertyList = new SqlNodeList(getPos()); }
+
+ tableName = CompoundIdentifier()
+ [
+ <LPAREN> { pos = getPos(); }
+ TableColumn(ctx)
+ (
+ <COMMA> TableColumn(ctx)
+ )*
+ {
+ pos = pos.plus(getPos());
+ columnList = new SqlNodeList(ctx.columnList, pos);
+ }
+ <RPAREN>
+ ]
+ [ <COMMENT> <QUOTED_STRING> {
+ comment = createStringLiteral(token.image, getPos());
+ }]
+ [
+ <PARTITIONED> <BY>
+ <LPAREN>
+ {
+ List<SqlNode> partCols = new ArrayList();
+ if ( columnList == SqlNodeList.EMPTY ) {
+ columnList = new SqlNodeList(pos.plus(getPos()));
+ }
+ }
+ PartColumnDef(partCols)
+ (
+ <COMMA> PartColumnDef(partCols)
+ )*
+ {
+ partitionColumns = new SqlNodeList(partCols, pos.plus(getPos()));
+ }
+ <RPAREN>
+ ]
+ [
+ <ROW> <FORMAT>
+ rowFormat = TableRowFormat(getPos())
+ ]
+ [
+ <STORED> <AS>
+ storedAs = TableStoredAs(getPos())
+ ]
+ [
+ <LOCATION> <QUOTED_STRING>
+ { location = createStringLiteral(token.image, getPos()); }
+ ]
+ [
+ <TBLPROPERTIES>
+ {
+ SqlNodeList props = TableProperties();
+ for (SqlNode node : props) {
+ propertyList.add(node);
+ }
+ }
+ ]
+ {
+ return new SqlCreateHiveTable(startPos.plus(getPos()),
+ tableName,
+ columnList,
+ ctx,
+ propertyList,
+ partitionColumns,
+ comment,
+ isTemporary,
+ isExternal,
+ rowFormat,
+ storedAs,
+ location);
+ }
+}
+
+SqlDrop SqlDropTable(Span s, boolean replace) :
+{
+ SqlIdentifier tableName = null;
+ boolean ifExists = false;
+}
+{
+ <TABLE>
+
+ (
+ <IF> <EXISTS> { ifExists = true; }
+ |
+ { ifExists = false; }
+ )
+
+ tableName = CompoundIdentifier()
+
+ {
+ return new SqlDropTable(s.pos(), tableName, ifExists, false);
+ }
+}
+
+void TableColumn2(List<SqlNode> list) :
+{
+ SqlParserPos pos;
+ SqlIdentifier name;
+ SqlDataTypeSpec type;
+ SqlCharStringLiteral comment = null;
+}
+{
+ name = SimpleIdentifier()
+ type = ExtendedDataType()
+ [ <COMMENT> <QUOTED_STRING> {
+ comment = createStringLiteral(token.image, getPos());
+ }]
+ {
+ SqlTableColumn tableColumn = new SqlTableColumn(name, type, null,
comment, getPos());
+ list.add(tableColumn);
+ }
+}
+
+void PartColumnDef(List<SqlNode> list) :
+{
+ SqlParserPos pos;
+ SqlIdentifier name;
+ SqlDataTypeSpec type;
+ SqlCharStringLiteral comment = null;
+}
+{
+ name = SimpleIdentifier()
+ type = DataType()
+ [ <COMMENT> <QUOTED_STRING> {
+ comment = createStringLiteral(token.image, getPos());
+ }]
+ {
+ type = type.withNullable(true);
+ SqlTableColumn tableColumn = new SqlTableColumn(name, type, null,
comment, getPos());
+ list.add(tableColumn);
+ }
+}
+
+void TableColumn(HiveTableCreationContext context) :
+{
+}
+{
+ (LOOKAHEAD(2)
+ TableColumnWithConstraint(context)
+ |
+ TableConstraint(context)
+ )
+}
+
+/** Parses a table constraint for CREATE TABLE. */
+void TableConstraint(HiveTableCreationContext context) :
+{
+ SqlIdentifier constraintName = null;
+ final SqlLiteral spec;
+ final SqlNodeList columns;
+}
+{
+ [ constraintName = ConstraintName() ]
+ spec = TableConstraintSpec()
+ columns = ParenthesizedSimpleIdentifierList()
+ context.pkTrait = ConstraintTrait()
+ {
+ SqlTableConstraint tableConstraint = new SqlTableConstraint(
+ constraintName,
+ spec,
+ columns,
+
SqlConstraintEnforcement.NOT_ENFORCED.symbol(getPos()),
+ true,
+ getPos());
+ context.constraints.add(tableConstraint);
+ }
+}
+
+SqlLiteral TableConstraintSpec() :
+{
+ SqlLiteral spec;
+}
+{
+ <PRIMARY> <KEY>
+ {
+ spec = SqlUniqueSpec.PRIMARY_KEY.symbol(getPos());
+ return spec;
+ }
+}
+
+SqlIdentifier ConstraintName() :
+{
+ SqlIdentifier constraintName;
+}
+{
+ <CONSTRAINT> constraintName = SimpleIdentifier() {
+ return constraintName;
+ }
+}
+
+void TableColumnWithConstraint(HiveTableCreationContext context) :
+{
+ SqlParserPos pos;
+ SqlIdentifier name;
+ SqlDataTypeSpec type;
+ SqlCharStringLiteral comment = null;
+}
+{
+ name = SimpleIdentifier()
+ type = ExtendedDataType()
+ {
+ // we have NOT NULL column constraint here
+ if (!type.getNullable()) {
+ if(context.notNullTraits == null) {
+ context.notNullTraits = new ArrayList();
+ }
+ context.notNullTraits.add(ConstraintTrait());
+ }
+ SqlTableColumn tableColumn = new SqlTableColumn(name, type, null,
comment, getPos());
+ context.columnList.add(tableColumn);
+ }
+ [ <COMMENT> <QUOTED_STRING> {
+ comment = createStringLiteral(token.image, getPos());
+ }]
+}
+
+byte ConstraintTrait() :
+{
+ // a constraint is by default ENABLE NOVALIDATE RELY
+ byte constraintTrait = HiveDDLUtils.defaultTrait();
+}
+{
+ [
+ <ENABLE>
+ |
+ <DISABLE> { constraintTrait =
HiveDDLUtils.disableConstraint(constraintTrait); }
+ ]
+ [
+ <NOVALIDATE>
+ |
+ <VALIDATE> { constraintTrait =
HiveDDLUtils.validateConstraint(constraintTrait); }
+ ]
+ [
+ <RELY>
+ |
+ <NORELY> { constraintTrait =
HiveDDLUtils.noRelyConstraint(constraintTrait); }
+ ]
+ { return constraintTrait; }
+}
+
+/**
+* Different with {@link #DataType()}, we support a [ NULL | NOT NULL ] suffix
syntax for both the
+* collection element data type and the data type itself.
+*
+* <p>See {@link #SqlDataTypeSpec} for the syntax details of {@link
#DataType()}.
+*/
+SqlDataTypeSpec ExtendedDataType() :
+{
+ SqlTypeNameSpec typeName;
+ final Span s;
+ boolean elementNullable = true;
+ boolean nullable = true;
+}
+{
+ <#-- #DataType does not take care of the nullable attribute. -->
+ typeName = TypeName() {
+ s = span();
+ }
+ (
+ LOOKAHEAD(3)
+ elementNullable = NullableOptDefaultTrue()
+ typeName = ExtendedCollectionsTypeName(typeName, elementNullable)
+ )*
+ nullable = NullableOptDefaultTrue()
+ {
+ return new SqlDataTypeSpec(typeName,
s.end(this)).withNullable(nullable);
+ }
+}
+
+HiveTableStoredAs TableStoredAs(SqlParserPos pos) :
+{
+ SqlIdentifier fileFormat = null;
+ SqlCharStringLiteral inputFormat = null;
+ SqlCharStringLiteral outputFormat = null;
+}
+{
+ (
+ LOOKAHEAD(2)
+ <INPUTFORMAT> <QUOTED_STRING> { inputFormat =
createStringLiteral(token.image, getPos()); }
+ <OUTPUTFORMAT> <QUOTED_STRING> { outputFormat =
createStringLiteral(token.image, getPos()); }
+ { return HiveTableStoredAs.ofInputOutputFormat(pos, inputFormat,
outputFormat); }
+ |
+ fileFormat = SimpleIdentifier()
+ { return HiveTableStoredAs.ofFileFormat(pos, fileFormat); }
+ )
+}
+
+HiveTableRowFormat TableRowFormat(SqlParserPos pos) :
+{
+ SqlCharStringLiteral fieldsTerminator = null;
+ SqlCharStringLiteral escape = null;
+ SqlCharStringLiteral collectionTerminator = null;
+ SqlCharStringLiteral mapKeyTerminator = null;
+ SqlCharStringLiteral linesTerminator = null;
+ SqlCharStringLiteral nullAs = null;
+ SqlCharStringLiteral serdeClass = null;
+ SqlNodeList serdeProps = null;
+}
+{
+ (
+ <DELIMITED>
+ [ <FIELDS> <TERMINATED> <BY> <QUOTED_STRING>
+ { fieldsTerminator = createStringLiteral(token.image, getPos()); }
+ [ <ESCAPED> <BY> <QUOTED_STRING> { escape =
createStringLiteral(token.image, getPos()); } ]
+ ]
+ [ <COLLECTION> <ITEMS> <TERMINATED> <BY> <QUOTED_STRING> {
collectionTerminator = createStringLiteral(token.image, getPos()); } ]
+ [ <MAP> <KEYS> <TERMINATED> <BY> <QUOTED_STRING> { mapKeyTerminator =
createStringLiteral(token.image, getPos()); } ]
+ [ <LINES> <TERMINATED> <BY> <QUOTED_STRING> { linesTerminator =
createStringLiteral(token.image, getPos()); } ]
+ [ <NULL> <DEFINED> <AS> <QUOTED_STRING> { nullAs =
createStringLiteral(token.image, getPos()); } ]
+ { return HiveTableRowFormat.withDelimited(pos, fieldsTerminator, escape,
collectionTerminator, mapKeyTerminator, linesTerminator, nullAs); }
+ |
+ <SERDE> <QUOTED_STRING>
+ {
+ serdeClass = createStringLiteral(token.image, getPos());
+ }
+ [ <WITH> <SERDEPROPERTIES> serdeProps = TableProperties() ]
+ { return HiveTableRowFormat.withSerDe(pos, serdeClass, serdeProps); }
+ )
+}
+
+/**
+* A sql type name extended basic data type, it has a counterpart basic
+* sql type name but always represents as a special alias compared with the
standard name.
+*
+* <p>For example, STRING is synonym of VARCHAR(INT_MAX)
+* and BYTES is synonym of VARBINARY(INT_MAX).
+*/
+SqlTypeNameSpec ExtendedSqlBasicTypeName() :
+{
+ final SqlTypeName typeName;
+ final String typeAlias;
+ int precision = -1;
+}
+{
+ (
+ <STRING> {
+ typeName = SqlTypeName.VARCHAR;
+ typeAlias = token.image;
+ precision = Integer.MAX_VALUE;
+ }
+ |
+ <BYTES> {
+ typeName = SqlTypeName.VARBINARY;
+ typeAlias = token.image;
Review comment:
I mean for the default precision of `VARCHAR` in Hive, the precision should be
65535 instead of `Integer.MAX_VALUE`.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]