Copilot commented on code in PR #9737:
URL: https://github.com/apache/gravitino/pull/9737#discussion_r2697741922
##########
core/src/main/java/org/apache/gravitino/catalog/ManagedFunctionOperations.java:
##########
@@ -245,9 +260,110 @@ private void
validateDefinitionsNoArityOverlap(FunctionDefinition[] definitions)
}
}
+ private FunctionEntity applyChanges(FunctionEntity oldEntity,
FunctionChange... changes) {
+ String newComment = oldEntity.comment();
+ List<FunctionDefinition> newDefinitions =
+ new ArrayList<>(Arrays.asList(oldEntity.definitions()));
+
+ for (FunctionChange change : changes) {
+ if (change instanceof FunctionChange.UpdateComment) {
+ newComment = ((FunctionChange.UpdateComment) change).newComment();
+
+ } else if (change instanceof FunctionChange.AddDefinition) {
+ FunctionDefinition defToAdd = ((FunctionChange.AddDefinition)
change).definition();
Review Comment:
    The parameter order validation (via `findFirstOptionalParamIndex` in
`computeArities`) only runs as a side effect of arity computation, so a newly
added definition with an invalid parameter order is not rejected until arities
are computed, which yields a less specific error message. Consider calling
`computeArities(defToAdd)` explicitly before `validateNoArityOverlap` so the
parameter-order check runs up front for the new definition and fails with a
clearer error.
```suggestion
FunctionDefinition defToAdd = ((FunctionChange.AddDefinition)
change).definition();
// Validate parameter ordering and compute arities for the new
definition early
computeArities(defToAdd);
```
##########
core/src/test/java/org/apache/gravitino/catalog/TestManagedFunctionOperations.java:
##########
@@ -490,4 +859,51 @@ private FunctionDefinition
createSimpleDefinition(FunctionParam[] params) {
FunctionImpl impl = FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.TestUDF");
return FunctionDefinitions.of(params, new FunctionImpl[] {impl});
}
+
+ @Test
+ public void testParameterOrderValidationInAlterFunction() {
Review Comment:
This test is placed after helper methods at the end of the class. Test
methods should be grouped together with other test methods, not interspersed
with helper methods. Consider moving this test method to be with the other
alter function tests (after line 735, before the helper methods section that
starts at line 737).
##########
core/src/test/java/org/apache/gravitino/catalog/TestManagedFunctionOperations.java:
##########
@@ -388,6 +477,263 @@ public void testOverlappingAritiesWithDifferentTypes() {
Assertions.assertEquals(2, func.definitions().length);
}
+ @Test
+ public void testAlterFunctionRemoveDefinition() {
+ NameIdentifier funcIdent = getFunctionIdent("func_remove_def");
+
+ // Create function with two definitions
+ FunctionParam[] params1 = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionParam[] params2 = new FunctionParam[] {FunctionParams.of("b",
Types.StringType.get())};
+ FunctionDefinition[] definitions =
+ new FunctionDefinition[] {createSimpleDefinition(params1),
createSimpleDefinition(params2)};
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Remove one definition
+ Function updatedFunc =
+ functionOperations.alterFunction(funcIdent,
FunctionChange.removeDefinition(params1));
+
+ Assertions.assertEquals(1, updatedFunc.definitions().length);
+ }
+
+ @Test
+ public void testAlterFunctionRemoveOnlyDefinition() {
+ NameIdentifier funcIdent = getFunctionIdent("func_remove_only");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionDefinition[] definitions = new FunctionDefinition[]
{createSimpleDefinition(params)};
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Try to remove the only definition - should fail
+ Assertions.assertThrows(
+ IllegalArgumentException.class,
+ () -> functionOperations.alterFunction(funcIdent,
FunctionChange.removeDefinition(params)));
+ }
+
+ @Test
+ public void testAlterFunctionRemoveNonExistingDefinition() {
+ NameIdentifier funcIdent = getFunctionIdent("func_remove_nonexist");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionDefinition[] definitions = new FunctionDefinition[]
{createSimpleDefinition(params)};
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Try to remove a definition that doesn't exist
+ FunctionParam[] nonExistingParams =
+ new FunctionParam[] {FunctionParams.of("x", Types.StringType.get())};
+
+ Assertions.assertThrows(
+ IllegalArgumentException.class,
+ () ->
+ functionOperations.alterFunction(
+ funcIdent,
FunctionChange.removeDefinition(nonExistingParams)));
+ }
+
+ @Test
+ public void testAlterFunctionAddImpl() {
+ NameIdentifier funcIdent = getFunctionIdent("func_add_impl");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionImpl sparkImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.SparkUDF");
+ FunctionDefinition[] definitions =
+ new FunctionDefinition[] {
+ createDefinitionWithImpls(params, new FunctionImpl[] {sparkImpl})
+ };
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Add Trino implementation
+ FunctionImpl trinoImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.TRINO,
"com.example.TrinoUDF");
+ org.apache.gravitino.function.Function updatedFunc =
+ functionOperations.alterFunction(funcIdent,
FunctionChange.addImpl(params, trinoImpl));
+
+ Assertions.assertEquals(2, updatedFunc.definitions()[0].impls().length);
+ }
+
+ @Test
+ public void testAlterFunctionAddImplDuplicateRuntime() {
+ NameIdentifier funcIdent = getFunctionIdent("func_add_impl_dup");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionImpl sparkImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.SparkUDF");
+ FunctionDefinition[] definitions =
+ new FunctionDefinition[] {
+ createDefinitionWithImpls(params, new FunctionImpl[] {sparkImpl})
+ };
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Try to add another Spark implementation - should fail
+ FunctionImpl anotherSparkImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.AnotherSparkUDF");
+
+ Assertions.assertThrows(
+ IllegalArgumentException.class,
+ () ->
+ functionOperations.alterFunction(
+ funcIdent, FunctionChange.addImpl(params, anotherSparkImpl)));
+ }
+
+ @Test
+ public void testAlterFunctionAddImplToNonExistingDefinition() {
+ NameIdentifier funcIdent = getFunctionIdent("func_add_impl_nodef");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionDefinition[] definitions = new FunctionDefinition[]
{createSimpleDefinition(params)};
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Try to add impl to non-existing definition
+ FunctionParam[] nonExistingParams =
+ new FunctionParam[] {FunctionParams.of("x", Types.StringType.get())};
+ FunctionImpl impl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.TRINO,
"com.example.TrinoUDF");
+
+ Assertions.assertThrows(
+ IllegalArgumentException.class,
+ () ->
+ functionOperations.alterFunction(
+ funcIdent, FunctionChange.addImpl(nonExistingParams, impl)));
+ }
+
+ @Test
+ public void testAlterFunctionUpdateImpl() {
+ NameIdentifier funcIdent = getFunctionIdent("func_update_impl");
+ FunctionParam[] params = new FunctionParam[] {FunctionParams.of("a",
Types.IntegerType.get())};
+ FunctionImpl sparkImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.OldSparkUDF");
+ FunctionDefinition[] definitions =
+ new FunctionDefinition[] {
+ createDefinitionWithImpls(params, new FunctionImpl[] {sparkImpl})
+ };
+
+ functionOperations.registerFunction(
+ funcIdent, "Test function", FunctionType.SCALAR, true,
Types.StringType.get(), definitions);
+
+ // Update Spark implementation
+ FunctionImpl newSparkImpl =
+ FunctionImpls.ofJava(FunctionImpl.RuntimeType.SPARK,
"com.example.NewSparkUDF");
+ org.apache.gravitino.function.Function updatedFunc =
+ functionOperations.alterFunction(
+ funcIdent,
+ FunctionChange.updateImpl(params, FunctionImpl.RuntimeType.SPARK,
newSparkImpl));
+
+ Assertions.assertEquals(1, updatedFunc.definitions()[0].impls().length);
Review Comment:
    The test only verifies that the definition still has exactly one
implementation after the update; it does not verify that the implementation
was actually replaced with the new one. Consider adding an assertion that the
class name of the remaining implementation is "com.example.NewSparkUDF" rather
than "com.example.OldSparkUDF".
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]