This is an automated email from the ASF dual-hosted git repository.

mgrund pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 46de57b55113 [SPARK-52423][CONNECT] Add missing golang package name to pipelines.proto
46de57b55113 is described below

commit 46de57b55113ddf3da10d505550c467cf6997b75
Author: Martin Grund <martin.gr...@databricks.com>
AuthorDate: Wed Jun 11 04:09:34 2025 -0700

    [SPARK-52423][CONNECT] Add missing golang package name to pipelines.proto
    
    ### What changes were proposed in this pull request?
    The newly added `pipelines.proto` file is missing the golang package identifier that is needed for the Spark Connect Go client.
    
    ### Why are the changes needed?
    Spark 4.0 compatibility
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Manually tested as part of https://github.com/apache/spark-connect-go/pull/142.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #51126 from grundprinzip/missing-golang-package.
    
    Authored-by: Martin Grund <martin.gr...@databricks.com>
    Signed-off-by: Martin Grund <martin.gr...@databricks.com>
---
 python/pyspark/sql/connect/proto/pipelines_pb2.py                  | 6 ++++--
 sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto | 1 +
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/python/pyspark/sql/connect/proto/pipelines_pb2.py b/python/pyspark/sql/connect/proto/pipelines_pb2.py
index 017e7285f884..e75c635a2658 100644
--- a/python/pyspark/sql/connect/proto/pipelines_pb2.py
+++ b/python/pyspark/sql/connect/proto/pipelines_pb2.py
@@ -40,7 +40,7 @@ from pyspark.sql.connect.proto import types_pb2 as 
spark_dot_connect_dot_types__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    
b'\n\x1dspark/connect/pipelines.proto\x12\rspark.connect\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\xe6\x12\n\x0fPipelineCommand\x12h\n\x15\x63reate_dataflow_graph\x18\x01
 
\x01(\x0b\x32\x32.spark.connect.PipelineCommand.CreateDataflowGraphH\x00R\x13\x63reateDataflowGraph\x12U\n\x0e\x64\x65\x66ine_dataset\x18\x02
 
\x01(\x0b\x32,.spark.connect.PipelineCommand.DefineDatasetH\x00R\rdefineDataset\x12L\n\x0b\x64\x65\x66ine_f
 [...]
+    
b'\n\x1dspark/connect/pipelines.proto\x12\rspark.connect\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\xe6\x12\n\x0fPipelineCommand\x12h\n\x15\x63reate_dataflow_graph\x18\x01
 
\x01(\x0b\x32\x32.spark.connect.PipelineCommand.CreateDataflowGraphH\x00R\x13\x63reateDataflowGraph\x12U\n\x0e\x64\x65\x66ine_dataset\x18\x02
 
\x01(\x0b\x32,.spark.connect.PipelineCommand.DefineDatasetH\x00R\rdefineDataset\x12L\n\x0b\x64\x65\x66ine_f
 [...]
 )
 
 _globals = globals()
@@ -50,7 +50,9 @@ _builder.BuildTopDescriptorsAndMessages(
 )
 if not _descriptor._USE_C_DESCRIPTORS:
     _globals["DESCRIPTOR"]._loaded_options = None
-    _globals["DESCRIPTOR"]._serialized_options = 
b"\n\036org.apache.spark.connect.protoP\001"
+    _globals[
+        "DESCRIPTOR"
+    ]._serialized_options = 
b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
     
_globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._loaded_options = 
None
     
_globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._serialized_options
 = b"8\001"
     
_globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._loaded_options 
= None
diff --git a/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto b/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto
index 2f685c6da42c..f4f1d3b043d3 100644
--- a/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto
+++ b/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto
@@ -25,6 +25,7 @@ import "spark/connect/types.proto";
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // Dispatch object for pipelines commands. See each individual command for documentation.
 message PipelineCommand {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to