spark git commit: Revert "[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)"

2016-06-21 Thread meng
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 34feea336 -> 37d05ec9e


Revert "[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)"

This reverts commit 087bd2799366f4914d248e9b1f0fb921adbbdb43.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/37d05ec9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/37d05ec9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/37d05ec9

Branch: refs/heads/branch-2.0
Commit: 37d05ec9e96c0da786ee26b5c25216bf98f239c0
Parents: 34feea3
Author: Xiangrui Meng 
Authored: Tue Jun 21 00:33:38 2016 -0700
Committer: Xiangrui Meng 
Committed: Tue Jun 21 00:33:38 2016 -0700

--
 python/pyspark/sql/tests.py | 5 -----
 python/pyspark/sql/types.py | 9 ++++++---
 2 files changed, 6 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/37d05ec9/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index ecd1a05..c631ad8 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -318,11 +318,6 @@ class SQLTests(ReusedPySparkTestCase):
         [row] = self.spark.sql("SELECT double(add(1, 2)), add(double(2), 1)").collect()
         self.assertEqual(tuple(row), (6, 5))
 
-    def test_udf_without_arguments(self):
-        self.sqlCtx.registerFunction("foo", lambda: "bar")
-        [row] = self.sqlCtx.sql("SELECT foo()").collect()
-        self.assertEqual(row[0], "bar")
-
     def test_udf_with_array_type(self):
         d = [Row(l=list(range(3)), d={"key": list(range(5))})]
         rdd = self.sc.parallelize(d)
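For context, the deleted test covered registering and calling a Python UDF that takes no arguments. A rough sketch of that scenario, not part of the patch: the session setup is illustrative, and it uses the SparkSession catalog equivalent of the sqlCtx.registerFunction call in the removed test.

    from pyspark.sql import SparkSession

    # Illustrative session setup (not from the patch).
    spark = SparkSession.builder.master("local[1]").appName("zero-arg-udf").getOrCreate()

    # A Python UDF with no arguments, as in the removed test_udf_without_arguments.
    # catalog.registerFunction defaults to a StringType return type.
    spark.catalog.registerFunction("foo", lambda: "bar")

    # With SPARK-16086 applied this returned a single row containing "bar";
    # with the fix reverted, the zero-argument case is no longer covered by tests.
    print(spark.sql("SELECT foo()").collect())

    spark.stop()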

http://git-wip-us.apache.org/repos/asf/spark/blob/37d05ec9/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index f0b56be..bb2b954 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1401,7 +1401,11 @@ class Row(tuple):
         if args and kwargs:
             raise ValueError("Can not use both args "
                              "and kwargs to create Row")
-        if kwargs:
+        if args:
+            # create row class or objects
+            return tuple.__new__(self, args)
+
+        elif kwargs:
             # create row objects
             names = sorted(kwargs.keys())
             row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1409,8 +1413,7 @@ class Row(tuple):
             return row
 
         else:
-            # create row class or objects
-            return tuple.__new__(self, args)
+            raise ValueError("No args or kwargs")
 
     def asDict(self, recursive=False):
         """





spark git commit: Revert "[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)"

2016-06-21 Thread meng
Repository: spark
Updated Branches:
  refs/heads/master 843a1eba8 -> ce49bfc25


Revert "[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)"

This reverts commit a46553cbacf0e4012df89fe55385dec5beaa680a.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ce49bfc2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ce49bfc2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ce49bfc2

Branch: refs/heads/master
Commit: ce49bfc2550ba8f5a33235c7fc3b88201d63c276
Parents: 843a1eb
Author: Xiangrui Meng 
Authored: Tue Jun 21 00:32:51 2016 -0700
Committer: Xiangrui Meng 
Committed: Tue Jun 21 00:32:51 2016 -0700

--
 python/pyspark/sql/tests.py | 5 -----
 python/pyspark/sql/types.py | 9 ++++++---
 2 files changed, 6 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/ce49bfc2/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index ecd1a05..c631ad8 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -318,11 +318,6 @@ class SQLTests(ReusedPySparkTestCase):
         [row] = self.spark.sql("SELECT double(add(1, 2)), add(double(2), 1)").collect()
         self.assertEqual(tuple(row), (6, 5))
 
-    def test_udf_without_arguments(self):
-        self.sqlCtx.registerFunction("foo", lambda: "bar")
-        [row] = self.sqlCtx.sql("SELECT foo()").collect()
-        self.assertEqual(row[0], "bar")
-
     def test_udf_with_array_type(self):
         d = [Row(l=list(range(3)), d={"key": list(range(5))})]
         rdd = self.sc.parallelize(d)

http://git-wip-us.apache.org/repos/asf/spark/blob/ce49bfc2/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index f0b56be..bb2b954 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1401,7 +1401,11 @@ class Row(tuple):
         if args and kwargs:
             raise ValueError("Can not use both args "
                              "and kwargs to create Row")
-        if kwargs:
+        if args:
+            # create row class or objects
+            return tuple.__new__(self, args)
+
+        elif kwargs:
             # create row objects
             names = sorted(kwargs.keys())
             row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1409,8 +1413,7 @@ class Row(tuple):
             return row
 
         else:
-            # create row class or objects
-            return tuple.__new__(self, args)
+            raise ValueError("No args or kwargs")
 
     def asDict(self, recursive=False):
         """

