This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 1a1fcd61322b Revert "[SPARK-50310][CONNECT][PYTHON] Call `with_origin_to_class` when the `Column` initializing"
1a1fcd61322b is described below
commit 1a1fcd61322b3cf9713e987a16882d7ddf666d2e
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Fri Dec 27 14:58:46 2024 +0900
Revert "[SPARK-50310][CONNECT][PYTHON] Call `with_origin_to_class` when the `Column` initializing"
This reverts commit fc69194fc03212035f4b42701dcbc409e5a36b03.
---
python/pyspark/sql/classic/column.py | 10 ++--------
python/pyspark/sql/connect/column.py | 10 ++--------
2 files changed, 4 insertions(+), 16 deletions(-)
diff --git a/python/pyspark/sql/classic/column.py b/python/pyspark/sql/classic/column.py
index 05fcb2162822..c08eac7f6a04 100644
--- a/python/pyspark/sql/classic/column.py
+++ b/python/pyspark/sql/classic/column.py
@@ -33,6 +33,7 @@ from typing import (
from pyspark.sql.column import Column as ParentColumn
from pyspark.errors import PySparkAttributeError, PySparkTypeError, PySparkValueError
+from pyspark.errors.utils import with_origin_to_class
from pyspark.sql.types import DataType
from pyspark.sql.utils import get_active_spark_context, enum_to_value
@@ -174,19 +175,12 @@ def _reverse_op(
return Column(jc)
+@with_origin_to_class
class Column(ParentColumn):
def __new__(
cls,
jc: "JavaObject",
) -> "Column":
- # We apply `with_origin_to_class` decorator here instead of top of the class definition
- # to prevent circular import issue when initializing the SparkSession.
- # See https://github.com/apache/spark/pull/49054 for more detail.
- from pyspark.errors.utils import with_origin_to_class
-
- if not hasattr(cls, "_with_origin_applied"):
- cls = with_origin_to_class(cls)
- cls._with_origin_applied = True
self = object.__new__(cls)
self.__init__(jc) # type: ignore[misc]
return self
diff --git a/python/pyspark/sql/connect/column.py b/python/pyspark/sql/connect/column.py
index 1e49afc0b158..c5733801814e 100644
--- a/python/pyspark/sql/connect/column.py
+++ b/python/pyspark/sql/connect/column.py
@@ -52,6 +52,7 @@ from pyspark.sql.connect.expressions import (
WithField,
DropField,
)
+from pyspark.errors.utils import with_origin_to_class
if TYPE_CHECKING:
@@ -106,19 +107,12 @@ def _to_expr(v: Any) -> Expression:
return v._expr if isinstance(v, Column) else LiteralExpression._from_value(v)
+@with_origin_to_class(["to_plan"])
class Column(ParentColumn):
def __new__(
cls,
expr: "Expression",
) -> "Column":
- # We apply `with_origin_to_class` decorator here instead of top of the class definition
- # to prevent circular import issue when initializing the SparkSession.
- # See https://github.com/apache/spark/pull/49054 for more detail.
- from pyspark.errors.utils import with_origin_to_class
-
- if not hasattr(cls, "_with_origin_applied"):
- cls = with_origin_to_class(["to_plan"])(cls)
- cls._with_origin_applied = True
self = object.__new__(cls)
self.__init__(expr) # type: ignore[misc]
return self
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]