Repository: hive
Updated Branches:
  refs/heads/master 487714aaf -> ee5566b75


HIVE-20536 : Add Surrogate Keys function to Hive (Miklos Gergely via Ashutosh 
Chauhan)

Signed-off-by: Ashutosh Chauhan <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee5566b7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee5566b7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee5566b7

Branch: refs/heads/master
Commit: ee5566b75fb69d7a77352f08e70a88a9f9080384
Parents: 487714a
Author: Miklos Gergely <[email protected]>
Authored: Wed Sep 19 20:49:23 2018 -0700
Committer: Ashutosh Chauhan <[email protected]>
Committed: Wed Sep 19 20:49:23 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   2 +
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |   4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  19 ++
 .../ql/udf/generic/GenericUDFSurrogateKey.java  | 137 ++++++++++++
 .../udf/generic/TestGenericUDFSurrogateKey.java | 206 +++++++++++++++++++
 .../results/clientpositive/show_functions.q.out |   1 +
 6 files changed, 368 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 3f538b3..0bc8d84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -364,6 +364,8 @@ public final class FunctionRegistry {
     system.registerGenericUDF("restrict_information_schema", 
GenericUDFRestrictInformationSchema.class);
     system.registerGenericUDF("current_authorizer", 
GenericUDFCurrentAuthorizer.class);
 
+    system.registerGenericUDF("surrogate_key", GenericUDFSurrogateKey.class);
+
     system.registerGenericUDF("isnull", GenericUDFOPNull.class);
     system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);
     system.registerGenericUDF("istrue", GenericUDFOPTrue.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 3309b9b..b655ab1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -94,6 +94,7 @@ import 
org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentDate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentUser;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSurrogateKey;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -832,7 +833,8 @@ public abstract class BaseSemanticAnalyzer {
       if(defFunc.getGenericUDF() instanceof GenericUDFOPNull
           || defFunc.getGenericUDF() instanceof GenericUDFCurrentTimestamp
           || defFunc.getGenericUDF() instanceof GenericUDFCurrentDate
-          || defFunc.getGenericUDF() instanceof GenericUDFCurrentUser){
+          || defFunc.getGenericUDF() instanceof GenericUDFCurrentUser
+          || defFunc.getGenericUDF() instanceof GenericUDFSurrogateKey){
         return true;
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 98448e4..3873282 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -238,11 +238,13 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCardinalityViolation;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMurmurHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSurrogateKey;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline;
 import org.apache.hadoop.hive.ql.util.ResourceDownloader;
@@ -7705,6 +7707,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
         fileSinkDesc, fsRS, input), inputRR);
 
     handleLineage(ltd, output);
+    setWriteIdForSurrogateKeys(ltd, input);
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: "
@@ -7949,6 +7952,22 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
     }
   }
 
+  private void setWriteIdForSurrogateKeys(LoadTableDesc ltd, Operator input) 
throws SemanticException {
+    Map<String, ExprNodeDesc> columnExprMap = 
input.getConf().getColumnExprMap();
+    if (ltd == null || columnExprMap == null) {
+      return;
+    }
+
+    for (ExprNodeDesc desc : columnExprMap.values()) {
+      if (desc instanceof ExprNodeGenericFuncDesc) {
+        GenericUDF genericUDF = 
((ExprNodeGenericFuncDesc)desc).getGenericUDF();
+        if (genericUDF instanceof GenericUDFSurrogateKey) {
+          ((GenericUDFSurrogateKey)genericUDF).setWriteId(ltd.getWriteId());
+        }
+      }
+    }
+  }
+
   private WriteEntity generateTableWriteEntity(String dest, Table dest_tab,
                                                Map<String, String> partSpec, 
LoadTableDesc ltd,
                                                DynamicPartitionCtx dpCtx, 
boolean isNonNativeTable)

http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSurrogateKey.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSurrogateKey.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSurrogateKey.java
new file mode 100644
index 0000000..1372b60
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSurrogateKey.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.tez.TezContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector;
+import org.apache.hadoop.io.LongWritable;
+
+/**
+ * This function is not a deterministic function, and not a runtime constant.
+ * The return value is a sequence within a query with a unique starting
point based on write_id and task_id
+ */
+@UDFType(deterministic = false)
+public class GenericUDFSurrogateKey extends GenericUDF {
+  private static final int DEFAULT_WRITE_ID_BITS = 24;
+  private static final int DEFAULT_TASK_ID_BITS = 16;
+  private static final int DEFAULT_ROW_ID_BITS = 24;
+
+  private int writeIdBits;
+  private int taskIdBits;
+  private int rowIdBits;
+
+  private long maxWriteId;
+  private long maxTaskId;
+  private long maxRowId;
+
+  private long writeId = -1;
+  private long taskId = -1;
+  private long rowId = 0;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+    if (arguments.length == 0) {
+      writeIdBits = DEFAULT_WRITE_ID_BITS;
+      taskIdBits = DEFAULT_TASK_ID_BITS;
+      rowIdBits = DEFAULT_ROW_ID_BITS;
+    } else if (arguments.length == 2) {
+      for (int i = 0; i < 2; i++) {
+        if (arguments[i].getCategory() != Category.PRIMITIVE) {
+          throw new UDFArgumentTypeException(0,
+              "SURROGATE_KEY input only takes primitive types, got " + 
arguments[i].getTypeName());
+        }
+      }
+
+      writeIdBits = 
((WritableConstantIntObjectInspector)arguments[0]).getWritableConstantValue().get();
+      taskIdBits = 
((WritableConstantIntObjectInspector)arguments[1]).getWritableConstantValue().get();
+      rowIdBits = 64 - (writeIdBits + taskIdBits);
+
+      if (writeIdBits < 1 || writeIdBits > 62) {
+        throw new UDFArgumentException("Write ID bits must be between 1 and 62 
(value: " + writeIdBits + ")");
+      }
+      if (taskIdBits < 1 || taskIdBits > 62) {
+        throw new UDFArgumentException("Task ID bits must be between 1 and 62 
(value: " + taskIdBits + ")");
+      }
+      if (writeIdBits + taskIdBits > 63) {
+        throw new UDFArgumentException("Write ID bits + Task ID bits must be 
less than 63 (value: " +
+            (writeIdBits + taskIdBits) + ")");
+      }
+    } else {
+      throw new UDFArgumentLengthException(
+          "The function SURROGATE_KEY takes 0 or 2 integer arguments (write id 
bits, taks id bits), but found " +
+              arguments.length);
+    }
+
+    maxWriteId = (1L << writeIdBits) - 1;
+    maxTaskId = (1L << taskIdBits) - 1;
+    maxRowId = (1L << rowIdBits) - 1;
+
+    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+  }
+
+  @Override
+  public void configure(MapredContext context) {
+    if (context instanceof TezContext) {
+      taskId = ((TezContext)context).getTezProcessorContext().getTaskIndex();
+    } else {
+      throw new IllegalStateException("surrogate_key function is only 
supported if the execution engine is Tez");
+    }
+
+    if (taskId > maxTaskId) {
+      throw new IllegalStateException(String.format("Task ID is out of range 
(%d bits) in surrogate_key", taskIdBits));
+    }
+  }
+
+  public void setWriteId(long writeId) {
+    this.writeId = writeId;
+    if (writeId > maxWriteId) {
+      throw new IllegalStateException(String.format("Write ID is out of range 
(%d bits) in surrogate_key", writeIdBits));
+    }
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    if (writeId == -1) {
+      throw new HiveException("Could not obtain Write ID for the surrogate_key 
function");
+    }
+
+    if (rowId > maxRowId) {
+      throw new HiveException(String.format("Row ID is out of range (%d bits) 
in surrogate_key", rowIdBits));
+    }
+
+    long uniqueId = (writeId << (taskIdBits + rowIdBits)) + (taskId << 
rowIdBits) + rowId;
+    rowId++;
+
+    return new LongWritable(uniqueId);
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "SURROGATE_KEY()";
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSurrogateKey.java
----------------------------------------------------------------------
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSurrogateKey.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSurrogateKey.java
new file mode 100644
index 0000000..8426826
--- /dev/null
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSurrogateKey.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.tez.TezContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.tez.runtime.api.ProcessorContext;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+import org.mockito.Mockito;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+
+public class TestGenericUDFSurrogateKey {
+
+  private GenericUDFSurrogateKey udf;
+  private TezContext mockTezContext;
+  private ProcessorContext mockProcessorContest;
+  private ObjectInspector[] emptyArguments = {};
+
+  @Rule
+  public ExpectedException expectedException = ExpectedException.none();
+  
+  @Before
+  public void init() {
+    udf = new GenericUDFSurrogateKey();
+    mockTezContext = Mockito.mock(TezContext.class);
+    mockProcessorContest = Mockito.mock(ProcessorContext.class);
+    
when(mockTezContext.getTezProcessorContext()).thenReturn(mockProcessorContest);
+  }
+
+  @Test
+  public void testSurrogateKeyDefault() throws HiveException {
+    when(mockProcessorContest.getTaskIndex()).thenReturn(1);
+
+    udf.initialize(emptyArguments);
+    udf.configure(mockTezContext);
+    udf.setWriteId(1);
+
+    runAndVerifyConst((1L << 40) + (1L << 24), udf);
+    runAndVerifyConst((1L << 40) + (1L << 24) + 1, udf);
+    runAndVerifyConst((1L << 40) + (1L << 24) + 2, udf);
+  }
+
+  @Test
+  public void testSurrogateKeyBitsSet() throws HiveException {
+    when(mockProcessorContest.getTaskIndex()).thenReturn(1);
+
+    udf.initialize(getArguments(10, 10));
+    udf.configure(mockTezContext);
+    udf.setWriteId(1);
+
+    runAndVerifyConst((1L << 54) + (1L << 44), udf);
+    runAndVerifyConst((1L << 54) + (1L << 44) + 1, udf);
+    runAndVerifyConst((1L << 54) + (1L << 44) + 2, udf);
+  }
+
+  @Test
+  public void testIllegalNumberOfArgs() throws HiveException {
+    expectedException.expect(UDFArgumentLengthException.class);
+    expectedException.expectMessage(
+        "The function SURROGATE_KEY takes 0 or 2 integer arguments (write id 
bits, taks id bits), but found 1");
+
+    ConstantObjectInspector argument0 = 
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        TypeInfoFactory.intTypeInfo, new IntWritable(10));
+    ObjectInspector[] arguments = {argument0};
+
+    udf.initialize(arguments);
+  }
+
+  @Test
+  public void testWriteIdBitsOutOfRange() throws HiveException {
+    expectedException.expect(UDFArgumentException.class);
+    expectedException.expectMessage("Write ID bits must be between 1 and 62 
(value: 63)");
+
+    udf.initialize(getArguments(63, 10));
+  }
+
+  @Test
+  public void testTaskIdBitsOutOfRange() throws HiveException {
+    expectedException.expect(UDFArgumentException.class);
+    expectedException.expectMessage("Task ID bits must be between 1 and 62 
(value: 0)");
+
+    udf.initialize(getArguments(10, 0));
+  }
+
+  @Test
+  public void testBitSumOutOfRange() throws HiveException {
+    expectedException.expect(UDFArgumentException.class);
+    expectedException.expectMessage("Write ID bits + Task ID bits must be less 
than 63 (value: 80)");
+
+    udf.initialize(getArguments(40, 40));
+  }
+
+  @Test
+  public void testNotTezContext() throws HiveException {
+    expectedException.expect(IllegalStateException.class);
+    expectedException.expectMessage("surrogate_key function is only supported 
if the execution engine is Tez");
+
+    MapredContext mockContext = Mockito.mock(MapredContext.class);
+
+    udf.initialize(emptyArguments);
+    udf.configure(mockContext);
+  }
+
+  @Test
+  public void testNoWriteId() throws HiveException {
+    expectedException.expect(HiveException.class);
+    expectedException.expectMessage("Could not obtain Write ID for the 
surrogate_key function");
+
+    when(mockProcessorContest.getTaskIndex()).thenReturn(1);
+
+    udf.initialize(emptyArguments);
+    udf.configure(mockTezContext);
+
+    runAndVerifyConst(0, udf);
+  }
+
+  @Test
+  public void testWriteIdOverLimit() throws HiveException {
+    expectedException.expect(IllegalStateException.class);
+    expectedException.expectMessage("Write ID is out of range (10 bits) in 
surrogate_key");
+
+    udf.initialize(getArguments(10, 10));
+    udf.setWriteId(1 << 10);
+  }
+
+  @Test
+  public void testTaskIdOverLimit() throws HiveException {
+    expectedException.expect(IllegalStateException.class);
+    expectedException.expectMessage("Task ID is out of range (10 bits) in 
surrogate_key");
+
+    when(mockProcessorContest.getTaskIndex()).thenReturn(1 << 10);
+
+    udf.initialize(getArguments(10, 10));
+    udf.configure(mockTezContext);
+  }
+
+  @Test
+  public void testRowIdOverLimit() throws HiveException {
+    expectedException.expect(HiveException.class);
+    expectedException.expectMessage("Row ID is out of range (1 bits) in 
surrogate_key");
+
+    when(mockProcessorContest.getTaskIndex()).thenReturn(1);
+
+    udf.initialize(getArguments(32, 31));
+    udf.configure(mockTezContext);
+    udf.setWriteId(1);
+
+    runAndVerifyConst((1L << 32) + (1L << 1), udf);
+    runAndVerifyConst((1L << 32) + (1L << 1) + 1, udf);
+    runAndVerifyConst((1L << 32) + (1L << 1) + 2, udf);
+  }
+
+  private ObjectInspector[] getArguments(int writeIdBits, int taskIdBits) {
+    ConstantObjectInspector argument0 = 
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        TypeInfoFactory.intTypeInfo, new IntWritable(writeIdBits));
+    ConstantObjectInspector argument1 = 
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        TypeInfoFactory.intTypeInfo, new IntWritable(taskIdBits));
+    ObjectInspector[] arguments = {argument0, argument1};
+    return arguments;
+  }
+
+  private void runAndVerifyConst(long expResult, GenericUDFSurrogateKey udf)
+      throws HiveException {
+    DeferredObject[] args = {};
+    LongWritable output = (LongWritable)udf.evaluate(args);
+    assertEquals("surrogate_key() test ", expResult, output.get());
+  }
+
+  @After
+  public void close() throws IOException {
+    udf.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ee5566b7/ql/src/test/results/clientpositive/show_functions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out 
b/ql/src/test/results/clientpositive/show_functions.q.out
index 8d41e78..b4ba322 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -254,6 +254,7 @@ substr
 substring
 substring_index
 sum
+surrogate_key
 tan
 to_date
 to_epoch_milli

Reply via email to