[
https://issues.apache.org/jira/browse/FLINK-3650?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15331687#comment-15331687
]
ASF GitHub Bot commented on FLINK-3650:
---------------------------------------
Github user fhueske commented on a diff in the pull request:
https://github.com/apache/flink/pull/1856#discussion_r67156229
--- Diff:
flink-scala/src/test/scala/org/apache/flink/api/operator/MinByOperatorTest.scala
---
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.flink.api.operator
+
+import org.apache.flink.api.common.InvalidProgramException
+import org.apache.flink.api.scala.ExecutionEnvironment
+import org.apache.flink.api.scala._
+import org.junit.Test
+import org.junit.Assert
+
+class MinByOperatorTest {
+ private val emptyTupleData = List[scala.Tuple5[Int, Long, String, Long, Int]]()
+ private val customTypeData = List[CustomType](new CustomType())
+
+ @Test
+ def testMinByKeyFieldsDataset(): Unit = {
+ val env = ExecutionEnvironment.getExecutionEnvironment
+ val collection = env.fromCollection(emptyTupleData)
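+ // should work: fields 0 through 4 are all valid key positions of the Tuple5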
+ try {
+ collection.minBy(4, 0, 1, 2, 3)
+ } catch {
+ case e: Exception => Assert.fail(e.getMessage)
+ }
+ }
+
+ /**
+ * This test validates that an out-of-bounds index throws an
+ * IndexOutOfBoundsException.
+ */
+ @Test(expected = classOf[IndexOutOfBoundsException])
+ def testOutOfTupleBoundsDataset1(): Unit = {
+
+ val env = ExecutionEnvironment.getExecutionEnvironment
+ val collection = env.fromCollection(emptyTupleData)
+
+ // should not work, key out of tuple bounds
+ collection.minBy(5)
+ }
+
+ /**
+ * This test validates that an out-of-bounds index throws an
+ * IndexOutOfBoundsException.
+ */
+ @Test(expected = classOf[IndexOutOfBoundsException])
+ def testOutOfTupleBoundsDataset2(): Unit = {
+ val env = ExecutionEnvironment.getExecutionEnvironment
+ val collection = env.fromCollection(emptyTupleData)
+
+ // should not work, key out of tuple bounds
+ collection.minBy(-1)
+ }
+
+ /**
+ * This test validates that an out-of-bounds index throws an
+ * IndexOutOfBoundsException.
+ */
+ @Test(expected = classOf[IndexOutOfBoundsException])
+ def testOutOfTupleBoundsDataset3(): Unit = {
+ val env = ExecutionEnvironment.getExecutionEnvironment
+ val collection = env.fromCollection(emptyTupleData)
+
+ // should not work, key out of tuple bounds
+ collection.minBy(1, 2, 3, 4, -1)
+ }
+
+ /**
+ * This test validates that an InvalidProgramException is thrown when maxBy
--- End diff ---
`maxBy` -> `minBy`
> Add maxBy/minBy to Scala DataSet API
> ------------------------------------
>
> Key: FLINK-3650
> URL: https://issues.apache.org/jira/browse/FLINK-3650
> Project: Flink
> Issue Type: Improvement
> Components: Java API, Scala API
> Affects Versions: 1.1.0
> Reporter: Till Rohrmann
> Assignee: ramkrishna.s.vasudevan
>
> The stable Java DataSet API contains the API calls {{maxBy}} and {{minBy}}.
> These methods are not supported by the Scala DataSet API and should be added
> there so that the two APIs stay consistent.
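For context, the behaviour the missing {{minBy}}/{{maxBy}} calls are meant to provide can already be approximated with the existing {{reduce}} operator of the Scala DataSet API. Below is a minimal sketch of that workaround; the object name, main method, and tuple values are illustrative only and are not taken from the pull request:
{code:scala}
import org.apache.flink.api.scala._

object MinBySketch {
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment

    val data: DataSet[(Int, Long, String)] = env.fromElements(
      (3, 10L, "c"),
      (1, 20L, "a"),
      (2, 30L, "b"))

    // Keep the element whose first field is smallest, i.e. roughly what a
    // minBy(0) call is expected to return once it exists on the Scala DataSet.
    val minByFirstField = data.reduce((a, b) => if (a._1 <= b._1) a else b)

    minByFirstField.print()
  }
}
{code}
A dedicated {{minBy}}/{{maxBy}} on the Scala DataSet keeps this reduce boilerplate out of user code and mirrors the methods the Java DataSet API already offers.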
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)