This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 133800a67c2 [Feature](auto-inc) Add start value for auto increment
column (#30512)
133800a67c2 is described below
commit 133800a67c22a4d0316d521689adda2841079f95
Author: abmdocrt <[email protected]>
AuthorDate: Tue Feb 6 11:59:37 2024 +0800
[Feature](auto-inc) Add start value for auto increment column (#30512)
---
be/src/vec/sink/autoinc_buffer.cpp | 2 +-
.../antlr4/org/apache/doris/nereids/DorisParser.g4 | 2 +-
fe/fe-core/src/main/cup/sql_parser.cup | 27 ++-
.../java/org/apache/doris/analysis/ColumnDef.java | 35 +--
.../doris/catalog/AutoIncrementGenerator.java | 3 +-
.../main/java/org/apache/doris/catalog/Column.java | 32 +--
.../java/org/apache/doris/catalog/OlapTable.java | 3 +-
.../apache/doris/datasource/InternalCatalog.java | 4 +-
.../doris/datasource/jdbc/client/JdbcClient.java | 2 +-
.../datasource/jdbc/client/JdbcMySQLClient.java | 4 +-
.../doris/nereids/parser/LogicalPlanBuilder.java | 16 +-
.../plans/commands/info/ColumnDefinition.java | 16 +-
.../storage/test_dup_tab_auto_inc_10000.out | 6 +-
.../storage/test_dup_tab_auto_inc_basic.out | 36 ++--
...=> test_dup_tab_auto_inc_start_value_10000.out} | 6 +-
.../test_dup_tab_auto_inc_start_value_basic.out | 34 +++
.../test_dup_tab_auto_inc_start_value_col.out | 23 ++
...test_dup_tab_auto_inc_start_value_with_null.out | 72 +++++++
.../storage/test_dup_tab_auto_inc_with_null.out | 74 +++----
.../unique/auto_inc_partial_update1.csv | 10 +-
.../unique/test_unique_table_auto_inc.out | 188 ++++++++---------
.../hive/test_autoinc_broker_load.out | 42 ++--
.../storage/test_dup_tab_auto_inc_col.groovy | 23 ++
.../test_dup_tab_auto_inc_start_value_10000.groovy | 115 ++++++++++
.../test_dup_tab_auto_inc_start_value_basic.groovy | 114 ++++++++++
...> test_dup_tab_auto_inc_start_value_col.groovy} | 72 +++++--
...t_dup_tab_auto_inc_start_value_with_null.groovy | 235 +++++++++++++++++++++
27 files changed, 943 insertions(+), 253 deletions(-)
diff --git a/be/src/vec/sink/autoinc_buffer.cpp
b/be/src/vec/sink/autoinc_buffer.cpp
index d7ae2920091..844d8ed8524 100644
--- a/be/src/vec/sink/autoinc_buffer.cpp
+++ b/be/src/vec/sink/autoinc_buffer.cpp
@@ -67,7 +67,7 @@ Status AutoIncIDBuffer::sync_request_ids(size_t length,
std::swap(_front_buffer, _backend_buffer);
}
- DCHECK(length <= _front_buffer.second);
+ DCHECK_LE(length, _front_buffer.second);
result->emplace_back(_front_buffer.first, length);
_front_buffer.first += length;
_front_buffer.second -= length;
diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
index 3943a30ee2a..c74dfd0cc2b 100644
--- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
+++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
@@ -490,7 +490,7 @@ columnDefs
columnDef
: colName=identifier type=dataType
- KEY? (aggType=aggTypeDef)? ((NOT NULL) | NULL)? (AUTO_INCREMENT)?
+ KEY? (aggType=aggTypeDef)? ((NOT NULL) | NULL)? (AUTO_INCREMENT
(LEFT_PAREN autoIncInitValue=number RIGHT_PAREN)?)?
(DEFAULT (nullValue=NULL | INTEGER_VALUE | stringValue=STRING_LITERAL
| CURRENT_TIMESTAMP (LEFT_PAREN defaultValuePrecision=number
RIGHT_PAREN)?))?
(ON UPDATE CURRENT_TIMESTAMP (LEFT_PAREN onUpdateValuePrecision=number
RIGHT_PAREN)?)?
diff --git a/fe/fe-core/src/main/cup/sql_parser.cup
b/fe/fe-core/src/main/cup/sql_parser.cup
index 708986bcbce..bcef197ca99 100644
--- a/fe/fe-core/src/main/cup/sql_parser.cup
+++ b/fe/fe-core/src/main/cup/sql_parser.cup
@@ -915,7 +915,7 @@ nonterminal ParseNode load_property;
nonterminal List<ParseNode> opt_load_property_list;
// Boolean
-nonterminal Boolean opt_negative, opt_is_allow_null, opt_is_key,
opt_read_only, opt_aggregate, opt_local, opt_is_auto_inc;
+nonterminal Boolean opt_negative, opt_is_allow_null, opt_is_key,
opt_read_only, opt_aggregate, opt_local;
nonterminal String opt_from_rollup, opt_to_rollup;
nonterminal ColumnPosition opt_col_pos;
@@ -972,6 +972,7 @@ nonterminal StorageBackend storage_backend;
nonterminal ArrayList<LockTable> opt_lock_tables_list;
nonterminal LockTable lock_table;
+nonterminal Long opt_auto_inc_init_value;
// workload policy/group
nonterminal String policy_condition_op, policy_condition_value;
@@ -3718,21 +3719,21 @@ opt_is_key ::=
;
column_definition ::=
- ident:columnName type_def:typeDef opt_is_key:isKey
opt_is_allow_null:isAllowNull opt_is_auto_inc:isAutoInc
opt_default_value:defaultValue opt_comment:comment
+ ident:columnName type_def:typeDef opt_is_key:isKey
opt_is_allow_null:isAllowNull opt_auto_inc_init_value:autoIncInitValue
opt_default_value:defaultValue opt_comment:comment
{:
- ColumnDef columnDef = new ColumnDef(columnName, typeDef, isKey, null,
isAllowNull, isAutoInc, defaultValue, comment);
+ ColumnDef columnDef = new ColumnDef(columnName, typeDef, isKey, null,
isAllowNull, autoIncInitValue, defaultValue, comment);
RESULT = columnDef;
:}
- | ident:columnName type_def:typeDef IDENT:fnName LPAREN
type_def_nullable_list:list RPAREN opt_is_auto_inc:isAutoInc
opt_default_value:defaultValue opt_comment:comment
+ | ident:columnName type_def:typeDef IDENT:fnName LPAREN
type_def_nullable_list:list RPAREN opt_auto_inc_init_value:autoIncInitValue
opt_default_value:defaultValue opt_comment:comment
{:
ColumnDef columnDef = new ColumnDef(columnName, typeDef, false,
AggregateType.GENERIC_AGGREGATION, false, defaultValue, comment);
columnDef.setGenericAggregationName(fnName);
columnDef.setGenericAggregationArguments(list);
RESULT = columnDef;
:}
- | ident:columnName type_def:typeDef opt_is_key:isKey opt_agg_type:aggType
opt_is_allow_null:isAllowNull opt_is_auto_inc:isAutoInc
opt_default_value:defaultValue opt_comment:comment
+ | ident:columnName type_def:typeDef opt_is_key:isKey opt_agg_type:aggType
opt_is_allow_null:isAllowNull opt_auto_inc_init_value:autoIncInitValue
opt_default_value:defaultValue opt_comment:comment
{:
- ColumnDef columnDef = new ColumnDef(columnName, typeDef, isKey,
aggType, isAllowNull, isAutoInc, defaultValue, comment);
+ ColumnDef columnDef = new ColumnDef(columnName, typeDef, isKey,
aggType, isAllowNull, autoIncInitValue, defaultValue, comment);
RESULT = columnDef;
:}
| ident:columnName type_def:typeDef opt_is_key:isKey opt_agg_type:aggType
LPAREN type_def_nullable_list:list RPAREN opt_default_value:defaultValue
opt_comment:comment
@@ -3772,15 +3773,23 @@ opt_is_allow_null ::=
:}
;
-opt_is_auto_inc ::=
+opt_auto_inc_init_value ::=
{:
- RESULT = false;
+ RESULT = Long.valueOf(-1);
:}
| KW_AUTO_INCREMENT
{:
- RESULT = true;
+ RESULT = Long.valueOf(1);
+ :}
+ | KW_AUTO_INCREMENT LPAREN INTEGER_LITERAL:auto_inc_initial_value RPAREN
+ {:
+ if (auto_inc_initial_value.longValue() < 0) {
+ throw new AnalysisException("AUTO_INCREMENT start value can not be
negative.");
+ }
+ RESULT = auto_inc_initial_value.longValue();
:}
;
+
opt_comment ::=
/* empty */
{:
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
index 9cd1ca44815..c3f51f1a2f5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
@@ -178,18 +178,19 @@ public class ColumnDef {
private boolean isKey;
private boolean isAllowNull;
private boolean isAutoInc;
+ private long autoIncInitValue;
private DefaultValue defaultValue;
private String comment;
private boolean visible;
private int clusterKeyId = -1;
public ColumnDef(String name, TypeDef typeDef) {
- this(name, typeDef, false, null, false, false, DefaultValue.NOT_SET,
"");
+ this(name, typeDef, false, null, false, -1, DefaultValue.NOT_SET, "");
}
public ColumnDef(String name, TypeDef typeDef, boolean isKey,
AggregateType aggregateType,
- boolean isAllowNull, boolean isAutoInc, DefaultValue
defaultValue, String comment) {
- this(name, typeDef, isKey, aggregateType, isAllowNull, isAutoInc,
defaultValue, comment, true);
+ boolean isAllowNull, long autoIncInitValue, DefaultValue
defaultValue, String comment) {
+ this(name, typeDef, isKey, aggregateType, isAllowNull,
autoIncInitValue, defaultValue, comment, true);
}
public ColumnDef(String name, TypeDef typeDef, boolean isAllowNull) {
@@ -197,18 +198,19 @@ public class ColumnDef {
}
public ColumnDef(String name, TypeDef typeDef, boolean isKey,
AggregateType aggregateType,
- boolean isAllowNull, DefaultValue defaultValue, String
comment) {
- this(name, typeDef, isKey, aggregateType, isAllowNull, false,
defaultValue, comment, true);
+ boolean isAllowNull, DefaultValue defaultValue, String comment) {
+ this(name, typeDef, isKey, aggregateType, isAllowNull, -1,
defaultValue, comment, true);
}
public ColumnDef(String name, TypeDef typeDef, boolean isKey,
AggregateType aggregateType,
- boolean isAllowNull, boolean isAutoInc, DefaultValue defaultValue,
String comment, boolean visible) {
+ boolean isAllowNull, long autoIncInitValue, DefaultValue
defaultValue, String comment, boolean visible) {
this.name = name;
this.typeDef = typeDef;
this.isKey = isKey;
this.aggregateType = aggregateType;
this.isAllowNull = isAllowNull;
- this.isAutoInc = isAutoInc;
+ this.isAutoInc = autoIncInitValue != -1;
+ this.autoIncInitValue = autoIncInitValue;
this.defaultValue = defaultValue;
this.comment = comment;
this.visible = visible;
@@ -216,39 +218,39 @@ public class ColumnDef {
public static ColumnDef newDeleteSignColumnDef() {
return new ColumnDef(Column.DELETE_SIGN,
TypeDef.create(PrimitiveType.TINYINT), false, null, false,
- false, new ColumnDef.DefaultValue(true, "0"), "doris delete
flag hidden column", false);
+ -1, new ColumnDef.DefaultValue(true, "0"), "doris delete flag
hidden column", false);
}
public static ColumnDef newDeleteSignColumnDef(AggregateType
aggregateType) {
return new ColumnDef(Column.DELETE_SIGN,
TypeDef.create(PrimitiveType.TINYINT), false, aggregateType, false,
- false, new ColumnDef.DefaultValue(true, "0"), "doris delete
flag hidden column", false);
+ -1, new ColumnDef.DefaultValue(true, "0"), "doris delete flag
hidden column", false);
}
public static ColumnDef newSequenceColumnDef(Type type) {
return new ColumnDef(Column.SEQUENCE_COL, new TypeDef(type), false,
null, true,
- false, DefaultValue.NULL_DEFAULT_VALUE, "sequence column
hidden column", false);
+ -1, DefaultValue.NULL_DEFAULT_VALUE, "sequence column hidden
column", false);
}
public static ColumnDef newSequenceColumnDef(Type type, AggregateType
aggregateType) {
return new ColumnDef(Column.SEQUENCE_COL, new TypeDef(type), false,
- aggregateType, true, false, DefaultValue.NULL_DEFAULT_VALUE,
+ aggregateType, true, -1, DefaultValue.NULL_DEFAULT_VALUE,
"sequence column hidden column", false);
}
public static ColumnDef newRowStoreColumnDef(AggregateType aggregateType) {
return new ColumnDef(Column.ROW_STORE_COL,
TypeDef.create(PrimitiveType.STRING), false,
- aggregateType, false, false,
+ aggregateType, false, -1,
new ColumnDef.DefaultValue(true, ""), "doris row store hidden
column", false);
}
public static ColumnDef newVersionColumnDef() {
- return new ColumnDef(Column.VERSION_COL,
TypeDef.create(PrimitiveType.BIGINT), false, null, false, false,
+ return new ColumnDef(Column.VERSION_COL,
TypeDef.create(PrimitiveType.BIGINT), false, null, false, -1,
new ColumnDef.DefaultValue(true, "0"), "doris version hidden
column", false);
}
public static ColumnDef newVersionColumnDef(AggregateType aggregateType) {
return new ColumnDef(Column.VERSION_COL,
TypeDef.create(PrimitiveType.BIGINT), false, aggregateType, false,
- false, new ColumnDef.DefaultValue(true, "0"), "doris version
hidden column", false);
+ -1, new ColumnDef.DefaultValue(true, "0"), "doris version
hidden column", false);
}
public boolean isAllowNull() {
@@ -560,6 +562,9 @@ public class ColumnDef {
if (isAutoInc) {
sb.append("AUTO_INCREMENT ");
+ sb.append("(");
+ sb.append(autoIncInitValue);
+ sb.append(")");
}
if (defaultValue.isSet) {
@@ -582,7 +587,7 @@ public class ColumnDef {
type = Expr.createAggStateType(genericAggregationName, typeList,
nullableList);
}
- return new Column(name, type, isKey, aggregateType, isAllowNull,
isAutoInc, defaultValue.value, comment,
+ return new Column(name, type, isKey, aggregateType, isAllowNull,
autoIncInitValue, defaultValue.value, comment,
visible, defaultValue.defaultValueExprDef,
Column.COLUMN_UNIQUE_ID_INIT_VALUE, defaultValue.getValue(),
clusterKeyId);
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/catalog/AutoIncrementGenerator.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/AutoIncrementGenerator.java
index ecda8fc9dbe..9528f07e0ca 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/catalog/AutoIncrementGenerator.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/catalog/AutoIncrementGenerator.java
@@ -57,10 +57,11 @@ public class AutoIncrementGenerator implements Writable {
public AutoIncrementGenerator() {
}
- public AutoIncrementGenerator(long dbId, long tableId, long columnId) {
+ public AutoIncrementGenerator(long dbId, long tableId, long columnId, long
nextId) {
this.dbId = dbId;
this.tableId = tableId;
this.columnId = columnId;
+ this.nextId = nextId;
}
public void setEditLog(EditLog editLog) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
index 395c3169ae4..b4254a2f607 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
@@ -74,7 +74,7 @@ public class Column implements Writable, GsonPostProcessable {
private static final String COLUMN_MAP_KEY = "key";
private static final String COLUMN_MAP_VALUE = "value";
- public static final Column UNSUPPORTED_COLUMN = new Column("unknown",
Type.UNSUPPORTED, true, null, true, false,
+ public static final Column UNSUPPORTED_COLUMN = new Column("unknown",
Type.UNSUPPORTED, true, null, true, -1,
null, "invalid", true, null, -1, null);
@SerializedName(value = "name")
@@ -98,6 +98,9 @@ public class Column implements Writable, GsonPostProcessable {
private boolean isAllowNull;
@SerializedName(value = "isAutoInc")
private boolean isAutoInc;
+
+ @SerializedName(value = "autoIncInitValue")
+ private long autoIncInitValue;
@SerializedName(value = "defaultValue")
private String defaultValue;
@SerializedName(value = "comment")
@@ -183,32 +186,32 @@ public class Column implements Writable,
GsonPostProcessable {
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
String defaultValue, String comment) {
- this(name, type, isKey, aggregateType, isAllowNull, false,
defaultValue, comment, true, null,
+ this(name, type, isKey, aggregateType, isAllowNull, -1, defaultValue,
comment, true, null,
COLUMN_UNIQUE_ID_INIT_VALUE, defaultValue, false, null);
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
String comment, boolean visible, int colUniqueId) {
- this(name, type, isKey, aggregateType, isAllowNull, false, null,
comment, visible, null, colUniqueId, null,
+ this(name, type, isKey, aggregateType, isAllowNull, -1, null, comment,
visible, null, colUniqueId, null,
false, null);
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
String defaultValue, String comment, boolean visible,
DefaultValueExprDef defaultValueExprDef,
int colUniqueId, String realDefaultValue) {
- this(name, type, isKey, aggregateType, isAllowNull, false,
defaultValue, comment, visible, defaultValueExprDef,
+ this(name, type, isKey, aggregateType, isAllowNull, -1, defaultValue,
comment, visible, defaultValueExprDef,
colUniqueId, realDefaultValue, false, null);
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
- boolean isAutoInc, String defaultValue, String comment, boolean
visible,
+ long autoIncInitValue, String defaultValue, String comment,
boolean visible,
DefaultValueExprDef defaultValueExprDef, int colUniqueId, String
realDefaultValue) {
- this(name, type, isKey, aggregateType, isAllowNull, isAutoInc,
defaultValue, comment, visible,
+ this(name, type, isKey, aggregateType, isAllowNull, autoIncInitValue,
defaultValue, comment, visible,
defaultValueExprDef, colUniqueId, realDefaultValue, false,
null);
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
- boolean isAutoInc, String defaultValue, String comment, boolean
visible,
+ long autoIncInitValue, String defaultValue, String comment,
boolean visible,
DefaultValueExprDef defaultValueExprDef, int colUniqueId, String
realDefaultValue,
boolean hasOnUpdateDefaultValue, DefaultValueExprDef
onUpdateDefaultValueExprDef) {
this.name = name;
@@ -225,7 +228,8 @@ public class Column implements Writable,
GsonPostProcessable {
this.isAggregationTypeImplicit = false;
this.isKey = isKey;
this.isAllowNull = isAllowNull;
- this.isAutoInc = isAutoInc;
+ this.isAutoInc = autoIncInitValue != -1;
+ this.autoIncInitValue = autoIncInitValue;
this.defaultValue = defaultValue;
this.realDefaultValue = realDefaultValue;
this.defaultValueExprDef = defaultValueExprDef;
@@ -251,20 +255,20 @@ public class Column implements Writable,
GsonPostProcessable {
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType,
- boolean isAllowNull, boolean isAutoInc, String defaultValue,
String comment,
+ boolean isAllowNull, long autoIncInitValue, String defaultValue,
String comment,
boolean visible, DefaultValueExprDef defaultValueExprDef, int
colUniqueId,
String realDefaultValue, boolean hasOnUpdateDefaultValue,
DefaultValueExprDef onUpdateDefaultValueExprDef, int clusterKeyId)
{
- this(name, type, isKey, aggregateType, isAllowNull, isAutoInc,
defaultValue, comment,
+ this(name, type, isKey, aggregateType, isAllowNull, autoIncInitValue,
defaultValue, comment,
visible, defaultValueExprDef, colUniqueId, realDefaultValue,
hasOnUpdateDefaultValue, onUpdateDefaultValueExprDef);
this.clusterKeyId = clusterKeyId;
}
public Column(String name, Type type, boolean isKey, AggregateType
aggregateType, boolean isAllowNull,
- boolean isAutoInc, String defaultValue, String comment, boolean
visible,
+ long autoIncInitValue, String defaultValue, String comment,
boolean visible,
DefaultValueExprDef defaultValueExprDef, int colUniqueId, String
realDefaultValue, int clusterKeyId) {
- this(name, type, isKey, aggregateType, isAllowNull, isAutoInc,
defaultValue, comment, visible,
+ this(name, type, isKey, aggregateType, isAllowNull, autoIncInitValue,
defaultValue, comment, visible,
defaultValueExprDef, colUniqueId, realDefaultValue);
this.clusterKeyId = clusterKeyId;
}
@@ -1117,6 +1121,10 @@ public class Column implements Writable,
GsonPostProcessable {
return this.uniqueId;
}
+ public long getAutoIncInitValue() {
+ return this.autoIncInitValue;
+ }
+
public void setIndexFlag(TColumn tColumn, OlapTable olapTable) {
List<Index> indexes = olapTable.getIndexes();
for (Index index : indexes) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
index 0c0ce320f11..3799c29ca7e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java
@@ -2470,7 +2470,8 @@ public class OlapTable extends Table implements
MTMVRelatedTableIf {
public void initAutoIncrementGenerator(long dbId) {
for (Column column : fullSchema) {
if (column.isAutoInc()) {
- autoIncrementGenerator = new AutoIncrementGenerator(dbId, id,
column.getUniqueId());
+ autoIncrementGenerator = new AutoIncrementGenerator(dbId, id,
column.getUniqueId(),
+ column.getAutoIncInitValue());
autoIncrementGenerator.setEditLog(Env.getCurrentEnv().getEditLog());
break;
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
index 68a54fd84f2..c091e5b683b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
@@ -1291,7 +1291,7 @@ public class InternalCatalog implements
CatalogIf<Database> {
ColumnDef columnDef;
if (resultExpr.getSrcSlotRef() == null) {
columnDef = new ColumnDef(name, typeDef, false, null,
- true, false, new DefaultValue(false, null), "");
+ true, -1, new DefaultValue(false, null), "");
} else {
Column column =
resultExpr.getSrcSlotRef().getDesc().getColumn();
boolean setDefault =
StringUtils.isNotBlank(column.getDefaultValue());
@@ -1309,7 +1309,7 @@ public class InternalCatalog implements
CatalogIf<Database> {
defaultValue = new DefaultValue(setDefault,
column.getDefaultValue());
}
columnDef = new ColumnDef(name, typeDef, false, null,
- column.isAllowNull(), false, defaultValue,
column.getComment());
+ column.isAllowNull(), -1, defaultValue,
column.getComment());
}
createTableStmt.addColumnDef(columnDef);
// set first column as default distribution
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
index 7851b708d36..07706ace822 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
@@ -517,7 +517,7 @@ public abstract class JdbcClient {
// because for utf8 encoding, a Chinese character takes up 3 bytes
protected int charOctetLength;
protected boolean isAllowNull;
- protected boolean isAutoincrement;
+ protected long autoIncInitValue;
protected String defaultValue;
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
index c86d3aa2e65..3a1807ce24f 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcMySQLClient.java
@@ -164,7 +164,7 @@ public class JdbcMySQLClient extends JdbcClient {
field.setRemarks(rs.getString("REMARKS"));
field.setCharOctetLength(rs.getInt("CHAR_OCTET_LENGTH"));
String isAutoincrement = rs.getString("IS_AUTOINCREMENT");
-
field.setAutoincrement("YES".equalsIgnoreCase(isAutoincrement));
+
field.setAutoIncInitValue("YES".equalsIgnoreCase(isAutoincrement) ? 1 : -1);
field.setDefaultValue(rs.getString("COLUMN_DEF"));
tableSchema.add(field);
}
@@ -197,7 +197,7 @@ public class JdbcMySQLClient extends JdbcClient {
}
dorisTableSchema.add(new Column(field.getColumnName(),
jdbcTypeToDoris(field), field.isKey(), null,
- field.isAllowNull(), field.isAutoincrement(),
field.getDefaultValue(), field.getRemarks(),
+ field.isAllowNull(), field.getAutoIncInitValue(),
field.getDefaultValue(), field.getRemarks(),
true, defaultValueExprDef, -1, null));
}
return dorisTableSchema;
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
index e427931d5ee..90da5b26b69 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
@@ -2495,8 +2495,20 @@ public class LogicalPlanBuilder extends
DorisParserBaseVisitor<Object> {
//comment should remove '\' and '(") at the beginning and end
String comment = ctx.comment != null ?
ctx.comment.getText().substring(1, ctx.comment.getText().length() - 1)
.replace("\\", "") : "";
- boolean isAutoInc = ctx.AUTO_INCREMENT() != null;
- return new ColumnDefinition(colName, colType, isKey, aggType,
!isNotNull, isAutoInc, defaultValue,
+ long autoIncInitValue = -1;
+ if (ctx.AUTO_INCREMENT() != null) {
+ if (ctx.autoIncInitValue != null) {
+ // AUTO_INCREMENT(Value) Value >= 0.
+ autoIncInitValue =
Long.valueOf(ctx.autoIncInitValue.getText());
+ if (autoIncInitValue < 0) {
+ throw new AnalysisException("AUTO_INCREMENT start value
can not be negative.");
+ }
+ } else {
+ // AUTO_INCREMENT default 1.
+ autoIncInitValue = Long.valueOf(1);
+ }
+ }
+ return new ColumnDefinition(colName, colType, isKey, aggType,
!isNotNull, autoIncInitValue, defaultValue,
onUpdateDefaultValue, comment);
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
index f6208feb48c..eac17f05d37 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/ColumnDefinition.java
@@ -62,7 +62,7 @@ public class ColumnDefinition {
private final String comment;
private final boolean isVisible;
private boolean aggTypeImplicit = false;
- private boolean isAutoInc = false;
+ private long autoIncInitValue = -1;
private int clusterKeyId = -1;
public ColumnDefinition(String name, DataType type, boolean isKey,
AggregateType aggType, boolean isNullable,
@@ -71,9 +71,9 @@ public class ColumnDefinition {
}
public ColumnDefinition(String name, DataType type, boolean isKey,
AggregateType aggType,
- boolean isNullable, boolean isAutoInc, Optional<DefaultValue>
defaultValue,
+ boolean isNullable, long autoIncInitValue, Optional<DefaultValue>
defaultValue,
Optional<DefaultValue> onUpdateDefaultValue, String comment) {
- this(name, type, isKey, aggType, isNullable, isAutoInc, defaultValue,
onUpdateDefaultValue,
+ this(name, type, isKey, aggType, isNullable, autoIncInitValue,
defaultValue, onUpdateDefaultValue,
comment, true);
}
@@ -96,14 +96,14 @@ public class ColumnDefinition {
* constructor
*/
private ColumnDefinition(String name, DataType type, boolean isKey,
AggregateType aggType,
- boolean isNullable, boolean isAutoInc, Optional<DefaultValue>
defaultValue,
+ boolean isNullable, long autoIncInitValue, Optional<DefaultValue>
defaultValue,
Optional<DefaultValue> onUpdateDefaultValue, String comment,
boolean isVisible) {
this.name = name;
this.type = type;
this.isKey = isKey;
this.aggType = aggType;
this.isNullable = isNullable;
- this.isAutoInc = isAutoInc;
+ this.autoIncInitValue = autoIncInitValue;
this.defaultValue = defaultValue;
this.onUpdateDefaultValue = onUpdateDefaultValue;
this.comment = comment;
@@ -150,10 +150,6 @@ public class ColumnDefinition {
this.clusterKeyId = clusterKeyId;
}
- public boolean isAutoInc() {
- return isAutoInc;
- }
-
private DataType updateCharacterTypeLength(DataType dataType) {
if (dataType instanceof ArrayType) {
return ArrayType.of(updateCharacterTypeLength(((ArrayType)
dataType).getItemType()));
@@ -619,7 +615,7 @@ public class ColumnDefinition {
*/
public Column translateToCatalogStyle() {
Column column = new Column(name, type.toCatalogDataType(), isKey,
aggType, isNullable,
- isAutoInc,
defaultValue.map(DefaultValue::getRawValue).orElse(null), comment, isVisible,
+ autoIncInitValue,
defaultValue.map(DefaultValue::getRawValue).orElse(null), comment, isVisible,
defaultValue.map(DefaultValue::getDefaultValueExprDef).orElse(null),
Column.COLUMN_UNIQUE_ID_INIT_VALUE,
defaultValue.map(DefaultValue::getValue).orElse(null),
onUpdateDefaultValue.isPresent(),
onUpdateDefaultValue.map(DefaultValue::getDefaultValueExprDef).orElse(null),
clusterKeyId);
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
index cd251efa814..0a994bca288 100644
---
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
@@ -1,10 +1,10 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !count_max_min --
-10001 10000 0
+10001 10001 1
-- !count_max_min --
-10001 10000 0
+10001 10001 1
-- !count_max_min --
-10001 10000 0
+10001 10001 1
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_basic.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_basic.out
index a3e4d94187f..00a0d25cb04 100644
---
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_basic.out
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_basic.out
@@ -1,23 +1,23 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !auto_inc_ids --
-0 "Bob" 100
-1 "Alice" 200
-2 "Tom" 300
-3 "Test" 400
-4 "Carter" 500
-5 "Smith" 600
-6 "Beata" 700
-7 "Doris" 800
-8 "Nereids" 900
+1 "Bob" 100
+2 "Alice" 200
+3 "Tom" 300
+4 "Test" 400
+5 "Carter" 500
+6 "Smith" 600
+7 "Beata" 700
+8 "Doris" 800
+9 "Nereids" 900
-- !auto_inc_ids --
-"Bob" 100 0
-"Alice" 200 1
-"Tom" 300 2
-"Test" 400 3
-"Carter" 500 4
-"Smith" 600 5
-"Beata" 700 6
-"Doris" 800 7
-"Nereids" 900 8
+"Bob" 100 1
+"Alice" 200 2
+"Tom" 300 3
+"Test" 400 4
+"Carter" 500 5
+"Smith" 600 6
+"Beata" 700 7
+"Doris" 800 8
+"Nereids" 900 9
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.out
similarity index 74%
copy from
regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
copy to
regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.out
index cd251efa814..1cb103a1816 100644
---
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_10000.out
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.out
@@ -1,10 +1,10 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !count_max_min --
-10001 10000 0
+10001 20000 10000
-- !count_max_min --
-10001 10000 0
+10001 20000 10000
-- !count_max_min --
-10001 10000 0
+10001 20000 10000
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.out
new file mode 100644
index 00000000000..f75a589fe71
--- /dev/null
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.out
@@ -0,0 +1,34 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !auto_inc_ids --
+10000 "Bob" 100
+10001 "Alice" 200
+10002 "Tom" 300
+10003 "Test" 400
+10004 "Carter" 500
+10005 "Smith" 600
+10006 "Beata" 700
+10007 "Doris" 800
+10008 "Nereids" 900
+
+-- !auto_inc_ids --
+"Bob" 100 10000
+"Alice" 200 10001
+"Tom" 300 10002
+"Test" 400 10003
+"Carter" 500 10004
+"Smith" 600 10005
+"Beata" 700 10006
+"Doris" 800 10007
+"Nereids" 900 10008
+
+-- !auto_inc_ids --
+0 "Bob" 100
+1 "Alice" 200
+2 "Tom" 300
+3 "Test" 400
+4 "Carter" 500
+5 "Smith" 600
+6 "Beata" 700
+7 "Doris" 800
+8 "Nereids" 900
+
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.out
new file mode 100644
index 00000000000..f1dbbd892ff
--- /dev/null
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.out
@@ -0,0 +1,23 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !desc --
+id BIGINT No true \N AUTO_INCREMENT
+value INT No false \N NONE
+
+-- !desc --
+id INT No true \N
+value BIGINT No false \N NONE,AUTO_INCREMENT
+
+-- !sql --
+1
+1
+5
+5
+9
+9
+13
+13
+13
+13
+17
+17
+
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.out
new file mode 100644
index 00000000000..e539b75ee22
--- /dev/null
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.out
@@ -0,0 +1,72 @@
+-- This file is automatically generated. You should know what you did if you
want to edit this
+-- !auto_inc_ids --
+4 "Carter" 500
+5 "Smith" 600
+6 "Beata" 700
+7 "Doris" 800
+8 "Nereids" 900
+10000 "Bob" 100
+10001 "Alice" 200
+10002 "Tom" 300
+10003 "Test" 400
+
+-- !auto_inc_ids --
+10 "Bob" 100
+20 "Tom" 300
+30 "Carter" 500
+40 "Beata" 700
+50 "Nereids" 900
+10000 "Alice" 200
+10001 "Test" 400
+10002 "Smith" 600
+10003 "Doris" 800
+
+-- !auto_inc_ids --
+ "Carter" 500 4
+ "Smith" 600 5
+ "Beata" 700 6
+ "Doris" 800 7
+ "Nereids" 900 8
+ "Bob" 100 10000
+ "Alice" 200 10001
+ "Tom" 300 10002
+ "Test" 400 10003
+
+-- !auto_inc_ids --
+ "Bob" 100 10
+ "Tom" 300 20
+ "Carter" 500 30
+ "Beata" 700 40
+ "Nereids" 900 50
+ "Alice" 200 10000
+ "Test" 400 10001
+ "Smith" 600 10002
+ "Doris" 800 10003
+
+-- !sql --
+10000 Bob 100
+10001 Alice 200
+10002 Tom 300
+10003 Test 400
+10004 Carter 500
+10005 Smith 600
+10006 Beata 700
+10007 Doris 800
+10008 Nereids 900
+
+-- !sql --
+10000 0
+10001 1
+10002 2
+10003 3
+10004 4
+10005 5
+
+-- !sql --
+0 10000
+1 10001
+2 10002
+3 10003
+4 10004
+5 10005
+
diff --git
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_with_null.out
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_with_null.out
index b44ffca5eef..ce87491580e 100644
---
a/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_with_null.out
+++
b/regression-test/data/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_with_null.out
@@ -1,20 +1,20 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !auto_inc_ids --
-0 "Bob" 100
-1 "Alice" 200
-2 "Tom" 300
-3 "Test" 400
+1 "Bob" 100
+2 "Alice" 200
+3 "Tom" 300
4 "Carter" 500
+4 "Test" 400
5 "Smith" 600
6 "Beata" 700
7 "Doris" 800
8 "Nereids" 900
-- !auto_inc_ids --
-0 "Alice" 200
-1 "Test" 400
-2 "Smith" 600
-3 "Doris" 800
+1 "Alice" 200
+2 "Test" 400
+3 "Smith" 600
+4 "Doris" 800
10 "Bob" 100
20 "Tom" 300
30 "Carter" 500
@@ -22,21 +22,21 @@
50 "Nereids" 900
-- !auto_inc_ids --
- "Bob" 100 0
- "Alice" 200 1
- "Tom" 300 2
- "Test" 400 3
+ "Bob" 100 1
+ "Alice" 200 2
+ "Tom" 300 3
"Carter" 500 4
+ "Test" 400 4
"Smith" 600 5
"Beata" 700 6
"Doris" 800 7
"Nereids" 900 8
-- !auto_inc_ids --
- "Alice" 200 0
- "Test" 400 1
- "Smith" 600 2
- "Doris" 800 3
+ "Alice" 200 1
+ "Test" 400 2
+ "Smith" 600 3
+ "Doris" 800 4
"Bob" 100 10
"Tom" 300 20
"Carter" 500 30
@@ -44,29 +44,29 @@
"Nereids" 900 50
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
-4 Carter 500
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+1 Bob 100
+2 Alice 200
+3 Tom 300
+4 Test 400
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
-0 0
-1 1
-2 2
-3 3
-4 4
-5 5
+1 0
+2 1
+3 2
+4 3
+5 4
+6 5
-- !sql --
-0 0
-1 1
-2 2
-3 3
-4 4
-5 5
+0 1
+1 2
+2 3
+3 4
+4 5
+5 6
diff --git
a/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
b/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
index da1cfb4175e..21c09c02b0d 100644
--- a/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
+++ b/regression-test/data/data_model_p0/unique/auto_inc_partial_update1.csv
@@ -1,5 +1,5 @@
-0, 123
-2, 323
-4, 523
-6, 723
-8, 923
\ No newline at end of file
+1, 123
+3, 323
+5, 523
+7, 723
+9, 923
\ No newline at end of file
diff --git
a/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
b/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
index 92328005edd..1fad44b368f 100644
--- a/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
+++ b/regression-test/data/data_model_p0/unique/test_unique_table_auto_inc.out
@@ -1,53 +1,53 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
-4 Carter 500
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+1 Bob 100
+2 Alice 200
+3 Tom 300
+4 Test 400
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
0 Bob 123
-1 Alice 200
+1 Bob 100
2 Tom 323
-3 Test 400
+3 Tom 300
4 Carter 523
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
-4 Carter 500
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+1 Bob 100
+2 Alice 200
+3 Tom 300
+4 Test 400
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
-1 Alice 200
-3 Test 400
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+2 Alice 200
+4 Test 400
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
1230 Bob 100
1232 Tom 300
1234 Carter 500
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
+1 Bob 100
+2 Alice 200
+3 Tom 300
4 Carter 500
5 Smith 600
6 Beata 700
@@ -56,9 +56,9 @@
-- !sql --
0 Bob 123
-1 Alice 200
+1 Bob 100
2 Tom 323
-3 Test 400
+3 Tom 300
4 Carter 523
5 Smith 600
6 Beata 700
@@ -73,43 +73,43 @@
4 Nereids 900
-- !partial_update_key --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
-4 Carter 500
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+1 Bob 100
+2 Alice 200
+3 Tom 300
+4 Test 400
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !partial_update_key --
-0 Bob 123
-1 Alice 200
-2 Tom 323
-3 Test 400
-4 Carter 523
-5 Smith 600
-6 Beata 723
-7 Doris 800
-8 Nereids 923
+1 Bob 123
+2 Alice 200
+3 Tom 323
+4 Test 400
+5 Carter 523
+6 Smith 600
+7 Beata 723
+8 Doris 800
+9 Nereids 923
-- !partial_update_value --
-Bob 100 0
-Alice 200 1
-Tom 300 2
-Test 400 3
-Carter 500 4
-Smith 600 5
-Beata 700 6
-Doris 800 7
-Nereids 900 8
+Bob 100 1
+Alice 200 2
+Tom 300 3
+Test 400 4
+Carter 500 5
+Smith 600 6
+Beata 700 7
+Doris 800 8
+Nereids 900 9
-- !partial_update_value --
-Alice 200 1
-Test 400 3
-Smith 600 5
-Doris 800 7
+Alice 200 2
+Test 400 4
+Smith 600 6
+Doris 800 8
Bob 100 9990
Tom 300 9992
Carter 500 9994
@@ -117,41 +117,41 @@ Beata 700 9996
Nereids 900 9998
-- !partial_update_value --
-Bob 100 0
-Alice 200 1
-Tom 300 2
-Test 400 3
-Carter 500 4
-Smith 600 5
-Beata 700 6
-Doris 800 7
-Nereids 900 8
+Bob 100 1
+Alice 200 2
+Tom 300 3
+Test 400 4
+Carter 500 5
+Smith 600 6
+Beata 700 7
+Doris 800 8
+Nereids 900 9
-- !partial_update_value --
-Bob 9990 0
-Alice 200 1
-Tom 9992 2
-Test 400 3
-Carter 9994 4
-Smith 600 5
-Beata 9996 6
-Doris 800 7
-Nereids 9998 8
+Bob 9990 1
+Alice 200 2
+Tom 9992 3
+Test 400 4
+Carter 9994 5
+Smith 600 6
+Beata 9996 7
+Doris 800 8
+Nereids 9998 9
-- !sql --
-0 a
-1 b
-2 c
+1 a
+2 b
+3 c
-- !sql --
-0 10
-1 20
-2 30
+1 10
+2 20
+3 30
-- !sql --
-0 a 10
-1 b 20
-2 c 30
+1 a 10
+2 b 20
+3 c 30
-- !sql --
0 AFRICA lar deposits. blithely final packages cajole. regular waters
are final requests. regular accounts are according to
diff --git
a/regression-test/data/external_table_p0/hive/test_autoinc_broker_load.out
b/regression-test/data/external_table_p0/hive/test_autoinc_broker_load.out
index cf14e8ea6a8..1ab1d59c557 100644
--- a/regression-test/data/external_table_p0/hive/test_autoinc_broker_load.out
+++ b/regression-test/data/external_table_p0/hive/test_autoinc_broker_load.out
@@ -1,31 +1,31 @@
-- This file is automatically generated. You should know what you did if you
want to edit this
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
-4 Carter 500
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+1 Bob 100
+2 Alice 200
+3 Tom 300
+4 Test 400
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
0 Bob 123
-1 Alice 200
+1 Bob 100
2 Tom 323
-3 Test 400
+3 Tom 300
4 Carter 523
-5 Smith 600
-6 Beata 700
-7 Doris 800
-8 Nereids 900
+5 Carter 500
+6 Smith 600
+7 Beata 700
+8 Doris 800
+9 Nereids 900
-- !sql --
-0 Bob 100
-1 Alice 200
-2 Tom 300
-3 Test 400
+1 Bob 100
+2 Alice 200
+3 Tom 300
4 Carter 500
5 Smith 600
6 Beata 700
@@ -34,9 +34,9 @@
-- !sql --
0 Bob 123
-1 Alice 200
+1 Bob 100
2 Tom 323
-3 Test 400
+3 Tom 300
4 Carter 523
5 Smith 600
6 Beata 700
diff --git
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
index 546677b8f17..346fa63ffa7 100644
---
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
+++
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
@@ -218,4 +218,27 @@ PROPERTIES (
"""
exception "the auto increment column can't have default value."
}
+
+ sql "drop table if exists ${table_check}"
+ try {
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table_check}` (
+ AUTO_INCREMENT BIGINT NOT NULL AUTO_INCREMENT COMMENT "",
+ VALUE int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(AUTO_INCREMENT)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(AUTO_INCREMENT) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ } catch (Exception e){
+ // no exception
+ assertTrue(false)
+ } finally{
+
+ }
}
\ No newline at end of file
diff --git
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.groovy
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.groovy
new file mode 100644
index 00000000000..ad4115840e8
--- /dev/null
+++
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_10000.groovy
@@ -0,0 +1,115 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_dup_table_auto_inc_start_value_10000") {
+
+ // auto-increment column is key
+ def table1 = "test_dup_tab_auto_inc_start_value_10000_key"
+ sql "drop table if exists ${table1}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table1}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `x` int(11) NOT NULL COMMENT "",
+ `y` int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table1}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'x, y'
+
+ file 'auto_inc_10000.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql "sync"
+ qt_count_max_min "select count(distinct id), max(id), min(id) from
${table1};"
+ sql "drop table if exists ${table1};"
+
+ // auto-increment column is value
+ def table2 = "test_dup_tab_auto_inc_start_value_10000_value"
+ sql "drop table if exists ${table2}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table2}` (
+ `x` int(11) NOT NULL COMMENT "",
+ `y` int(11) NOT NULL COMMENT "",
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`x`, `y`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`x`, `y`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table2}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'x, y'
+
+ file 'auto_inc_10000.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql "sync"
+ qt_count_max_min "select count(distinct id), max(id), min(id) from
${table2};"
+ sql "drop table if exists ${table2};"
+
+ sql "set batch_size = 4096;"
+ def table3 = "test_dup_tab_auto_inc_start_value_10000_key_2"
+ sql "drop table if exists ${table3}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table3}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `x` int(11) NOT NULL COMMENT "",
+ `y` int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table3}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'x, y'
+
+ file 'auto_inc_10000.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql "sync"
+ qt_count_max_min "select count(distinct id), max(id), min(id) from
${table3};"
+ sql "drop table if exists ${table3};"
+}
diff --git
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.groovy
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.groovy
new file mode 100644
index 00000000000..07cefae8d00
--- /dev/null
+++
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_basic.groovy
@@ -0,0 +1,114 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_dup_table_auto_inc_start_value_basic") {
+
+ // auto-increment column is key
+ def table1 = "test_dup_tab_auto_inc_col_start_value_basic_key"
+ sql "drop table if exists ${table1}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table1}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table1}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'name, value'
+
+ file 'auto_inc_basic.csv'
+ time 10000 // limit inflight 10s
+ }
+ qt_auto_inc_ids "select * from ${table1};"
+ sql "drop table if exists ${table1};"
+
+
+ // auto-increment column is value
+ def table2 = "test_dup_tab_auto_inc_col_start_value_basic_value"
+ sql "drop table if exists ${table2}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table2}` (
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分",
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`name`, `value`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`name`, `value`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table2}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'name, value'
+
+ file 'auto_inc_basic.csv'
+ time 10000 // limit inflight 10s
+ }
+ qt_auto_inc_ids "select * from ${table2} order by id;"
+ sql "drop table if exists ${table2};"
+
+ // auto-increment start value can be 0
+ def table3 = "test_dup_tab_auto_inc_col_start_value_0_basic_key"
+ sql "drop table if exists ${table3}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table3}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(0) COMMENT "用户 ID",
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table3}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'name, value'
+
+ file 'auto_inc_basic.csv'
+ time 10000 // limit inflight 10s
+ }
+ qt_auto_inc_ids "select * from ${table3};"
+ sql "drop table if exists ${table3};"
+
+}
\ No newline at end of file
diff --git
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.groovy
similarity index 73%
copy from
regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
copy to
regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.groovy
index 546677b8f17..68df88e3da5 100644
---
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_col.groovy
+++
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_col.groovy
@@ -15,13 +15,13 @@
// specific language governing permissions and limitations
// under the License.
-suite("test_dup_table_auto_inc_col") {
+suite("test_dup_table_auto_inc_start_value_col") {
- def table1 = "test_dup_tab_auto_inc_col1"
+ def table1 = "test_dup_tab_auto_inc_start_value_col1"
sql "drop table if exists ${table1}"
sql """
CREATE TABLE IF NOT EXISTS `${table1}` (
- `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "",
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "",
`value` int(11) NOT NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`id`)
@@ -38,12 +38,12 @@ suite("test_dup_table_auto_inc_col") {
assertTrue(res.size() != 0)
// duplicate table with an auto-increment value column
- def table2 = "test_dup_tab_auto_inc_col2"
+ def table2 = "test_dup_tab_auto_inc_start_value_col2"
sql "drop table if exists ${table2}"
sql """
CREATE TABLE IF NOT EXISTS `${table2}` (
`id` int(11) NOT NULL COMMENT "",
- `value` BIGINT NOT NULL AUTO_INCREMENT COMMENT ""
+ `value` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`id`)
COMMENT "OLAP"
@@ -59,13 +59,13 @@ suite("test_dup_table_auto_inc_col") {
assertTrue(res.size() != 0)
// duplicate table with two auto-increment columns
- def table3 = "test_dup_tab_auto_inc_col3"
+ def table3 = "test_dup_tab_auto_inc_start_value_col3"
sql "drop table if exists ${table3}"
test {
sql """
CREATE TABLE IF NOT EXISTS `${table3}` (
- `id` BIGINT NOT NULL AUTO_INCREMENT COMMENT "",
- `value` BIGINT NOT NULL AUTO_INCREMENT COMMENT ""
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "",
+ `value` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`id`)
COMMENT "OLAP"
@@ -81,11 +81,11 @@ suite("test_dup_table_auto_inc_col") {
// duplicate table with an auto-increment key column
// insert values and check query
- def table4 = "test_dup_tab_basic_int_tab1"
+ def table4 = "test_dup_tab_start_value_basic_int_tab1"
sql "drop table if exists ${table4}"
sql """
CREATE TABLE IF NOT EXISTS `${table4}` (
- `siteid` BIGINT NOT NULL AUTO_INCREMENT COMMENT "",
+ `siteid` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "",
`citycode` int(11) NOT NULL COMMENT "",
`userid` int(11) NOT NULL COMMENT "",
`pv` int(11) NOT NULL COMMENT ""
@@ -122,12 +122,12 @@ PROPERTIES (
// duplicate table with an auto-increment value column
// insert values and check query
- def table5 = "test_dup_tab_basic_int_tab2"
+ def table5 = "test_dup_tab_start_value_basic_int_tab2"
sql "drop table if exists ${table5}"
sql """
CREATE TABLE IF NOT EXISTS `${table5}` (
`siteid` int(11) NOT NULL COMMENT "",
- `citycode` BIGINT NOT NULL AUTO_INCREMENT COMMENT "",
+ `citycode` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "",
`userid` int(11) NOT NULL COMMENT "",
`pv` int(11) NOT NULL COMMENT ""
) ENGINE=OLAP
@@ -160,13 +160,13 @@ PROPERTIES (
sql """select citycode from ${table5} order by citycode"""
sql "drop table if exists ${table5}"
- def table_check = "test_dup_tab_auto_inc_col_check"
+ def table_check = "test_dup_tab_auto_inc_start_value_col_check"
sql "drop table if exists ${table_check}"
test {
sql """
CREATE TABLE IF NOT EXISTS `${table_check}` (
- `id` BIGINT AUTO_INCREMENT COMMENT "",
+ `id` BIGINT AUTO_INCREMENT(10000) COMMENT "",
`value` int(11) NOT NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`id`)
@@ -185,7 +185,7 @@ PROPERTIES (
test {
sql """
CREATE TABLE IF NOT EXISTS `${table_check}` (
- `id` VARCHAR NOT NULL AUTO_INCREMENT COMMENT "",
+ `id` VARCHAR NOT NULL AUTO_INCREMENT(10000) COMMENT "",
`value` int(11) NOT NULL COMMENT ""
) ENGINE=OLAP
DUPLICATE KEY(`id`)
@@ -218,4 +218,46 @@ PROPERTIES (
"""
exception "the auto increment column can't have default value."
}
+
+ sql "drop table if exists ${table_check}"
+ test {
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table_check}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(-1) COMMENT "",
+ `value` int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ exception "AUTO_INCREMENT start value can not be negative."
+ }
+
+ sql "drop table if exists ${table_check}"
+ try {
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table_check}` (
+ AUTO_INCREMENT BIGINT NOT NULL AUTO_INCREMENT(100) COMMENT "",
+ VALUE int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(AUTO_INCREMENT)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(AUTO_INCREMENT) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ } catch (Exception e){
+ // no exception
+ assertTrue(false)
+ } finally{
+
+ }
}
\ No newline at end of file
diff --git
a/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.groovy
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.groovy
new file mode 100644
index 00000000000..8b14942c95f
--- /dev/null
+++
b/regression-test/suites/data_model_p0/duplicate/storage/test_dup_tab_auto_inc_start_value_with_null.groovy
@@ -0,0 +1,235 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_dup_table_auto_inc_start_value_with_null") {
+
+ // auto-increment column is key, don't specify auto-inc column in stream
load
+ def table1 = "test_dup_table_auto_inc_start_value_basic_key_with_null"
+ sql "drop table if exists ${table1}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table1}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table1}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'id, name, value'
+
+ file 'auto_inc_basic_with_null.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql """sync"""
+ qt_auto_inc_ids "select * from ${table1};"
+ sql "drop table if exists ${table1};"
+
+
+ // auto-increment column is key, some of the values are null, some are valid
values
+ def table2 = "test_dup_table_auto_inc_start_value_basic_key_with_null_2"
+ sql "drop table if exists ${table2}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table2}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table2}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'id, name, value'
+
+ file 'auto_inc_basic_with_null_2.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql """sync"""
+ qt_auto_inc_ids "select * from ${table2};"
+ sql "drop table if exists ${table2};"
+
+
+ // auto-increment column is value, don't specify auto-inc column in stream
load
+ def table3 = "test_dup_table_auto_inc_start_value_basic_value_with_null"
+ sql "drop table if exists ${table3}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table3}` (
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分",
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`name`, `value`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`name`, `value`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table3}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'id, name, value'
+
+ file 'auto_inc_basic_with_null.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql """sync"""
+ qt_auto_inc_ids "select * from ${table3} order by id;"
+ sql "drop table if exists ${table3};"
+
+
+ // auto-increment column is value, some of the values are null, some are
valid values
+ def table4 = "test_dup_table_auto_inc_start_value_basic_value_with_null_2"
+ sql "drop table if exists ${table4}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table4}` (
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分",
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`name`, `value`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`name`, `value`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ streamLoad {
+ table "${table4}"
+
+ set 'column_separator', ','
+ set 'format', 'csv'
+ set 'columns', 'id, name, value'
+
+ file 'auto_inc_basic_with_null_2.csv'
+ time 10000 // limit inflight 10s
+ }
+ sql """sync"""
+ qt_auto_inc_ids "select * from ${table4} order by id;"
+ sql "drop table if exists ${table4};"
+
+ // insert stmt
+ def table5 = "test_dup_table_auto_inc_start_value_basic_insert_stmt"
+ sql "drop table if exists ${table5}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table5}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "用户 ID",
+ `name` varchar(65533) NOT NULL COMMENT "用户姓名",
+ `value` int(11) NOT NULL COMMENT "用户得分"
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ sql """insert into ${table5} values
+ (null, "Bob", 100),
+ (null, "Alice", 200),
+ (null, "Tom", 300),
+ (null, "Test", 400),
+ (null, "Carter", 500),
+ (null, "Smith", 600),
+ (null, "Beata", 700),
+ (null, "Doris", 800),
+ (null, "Nereids", 900);"""
+ qt_sql "select * from ${table5} order by id;"
+
+ def table6 = "test_dup_table_auto_inc_start_value_basic_insert_stmt2"
+ def table7 = "test_dup_table_auto_inc_start_value_basic_insert_stmt3"
+ sql "drop table if exists ${table6}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table6}` (
+ `id` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT "",
+ `value` int(11) NOT NULL COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ sql "drop table if exists ${table7}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table7}` (
+ `x` BIGINT NOT NULL,
+ `y` BIGINT NOT NULL
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`x`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`x`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ sql "insert into ${table7} values(0,0),(1,1),(2,2),(3,3),(4,4),(5,5);"
+ sql "insert into ${table6} select null, y from ${table7};"
+ qt_sql "select * from ${table6} order by id"
+
+ def table8 = "test_dup_table_auto_inc_start_value_basic_insert_stmt4"
+ sql "drop table if exists ${table8}"
+ sql """
+ CREATE TABLE IF NOT EXISTS `${table8}` (
+ `id` BIGINT NOT NULL COMMENT "",
+ `value` BIGINT NOT NULL AUTO_INCREMENT(10000) COMMENT ""
+ ) ENGINE=OLAP
+ DUPLICATE KEY(`id`)
+ COMMENT "OLAP"
+ DISTRIBUTED BY HASH(`id`) BUCKETS 1
+ PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1",
+ "in_memory" = "false",
+ "storage_format" = "V2"
+ )
+ """
+ sql "insert into ${table8} select x, null from ${table7};"
+ qt_sql "select * from ${table8} order by id"
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]