This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 419f46ab621 [SPARK-43895][CONNECT][GO] Prepare the go package path
419f46ab621 is described below
commit 419f46ab621e72e64dc9b897e416e56ea348cf1e
Author: Martin Grund <[email protected]>
AuthorDate: Wed May 31 13:17:09 2023 -0700
[SPARK-43895][CONNECT][GO] Prepare the go package path
### What changes were proposed in this pull request?
This patch adds the Go package path (the `go_package` option) to the proto files so that
they can be consumed from the Go client repository.
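For illustration only, a minimal sketch of how a Go client could consume the generated code once `go_package` is set. The import path is an assumption (it depends on the Go module that ends up hosting the generated sources); the `Plan`/`Relation` names follow standard protoc-gen-go output for `base.proto`:

```go
package main

import (
	"fmt"

	// Assumed import path: "internal/generated" resolves against whatever
	// Go module hosts the generated sources.
	proto "github.com/apache/spark-connect-go/internal/generated"
)

func main() {
	// Build a Plan whose root is an empty Relation, mirroring the
	// [[Plan]] message and its op_type oneof defined in base.proto.
	plan := &proto.Plan{
		OpType: &proto.Plan_Root{Root: &proto.Relation{}},
	}
	fmt.Println(plan)
}
```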
### Why are the changes needed?
Preparation for the Go client.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
No functional change.
Closes #41403 from grundprinzip/SPARK-43895.
Authored-by: Martin Grund <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
---
connector/connect/common/src/main/protobuf/buf.yaml | 2 ++
connector/connect/common/src/main/protobuf/spark/connect/base.proto | 1 +
.../connect/common/src/main/protobuf/spark/connect/catalog.proto | 1 +
.../connect/common/src/main/protobuf/spark/connect/commands.proto | 1 +
.../connect/common/src/main/protobuf/spark/connect/common.proto | 1 +
.../common/src/main/protobuf/spark/connect/example_plugins.proto | 1 +
.../common/src/main/protobuf/spark/connect/expressions.proto | 1 +
.../connect/common/src/main/protobuf/spark/connect/relations.proto | 1 +
.../connect/common/src/main/protobuf/spark/connect/types.proto | 1 +
python/pyspark/sql/connect/proto/base_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/catalog_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/commands_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/common_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/example_plugins_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/expressions_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/relations_pb2.py | 6 ++++--
python/pyspark/sql/connect/proto/types_pb2.py | 6 ++++--
17 files changed, 42 insertions(+), 16 deletions(-)
diff --git a/connector/connect/common/src/main/protobuf/buf.yaml b/connector/connect/common/src/main/protobuf/buf.yaml
index 496e97af3fa..f17614a8dc4 100644
--- a/connector/connect/common/src/main/protobuf/buf.yaml
+++ b/connector/connect/common/src/main/protobuf/buf.yaml
@@ -18,6 +18,8 @@ version: v1
 breaking:
   use:
     - FILE
+  except:
+    - FILE_SAME_GO_PACKAGE
 lint:
   use:
     - DEFAULT
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/base.proto b/connector/connect/common/src/main/protobuf/spark/connect/base.proto
index f54e28e3b61..e869712858a 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/base.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/base.proto
@@ -28,6 +28,7 @@ import "spark/connect/types.proto";
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // A [[Plan]] is the structure that carries the runtime information for the execution from the
 // client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/catalog.proto b/connector/connect/common/src/main/protobuf/spark/connect/catalog.proto
index 9729c102269..f048dbc7f25 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/catalog.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/catalog.proto
@@ -24,6 +24,7 @@ import "spark/connect/types.proto";
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // Catalog messages are marked as unstable.
 message Catalog {
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/commands.proto b/connector/connect/common/src/main/protobuf/spark/connect/commands.proto
index 87d76c5d63f..e716364f69b 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/commands.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/commands.proto
@@ -26,6 +26,7 @@ package spark.connect;
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // A [[Command]] is an operation that is executed by the server that does not directly consume or
 // produce a relational result.
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/common.proto b/connector/connect/common/src/main/protobuf/spark/connect/common.proto
index 42cac88ea3f..5c538cf1082 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/common.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/common.proto
@@ -21,6 +21,7 @@ package spark.connect;
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // StorageLevel for persisting Datasets/Tables.
 message StorageLevel {
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/example_plugins.proto b/connector/connect/common/src/main/protobuf/spark/connect/example_plugins.proto
index 03208c7a439..7ad171d2e8a 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/example_plugins.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/example_plugins.proto
@@ -19,6 +19,7 @@ syntax = 'proto3';
 
 import "spark/connect/relations.proto";
 import "spark/connect/expressions.proto";
+option go_package = "internal/generated";
 
 package spark.connect;
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/expressions.proto b/connector/connect/common/src/main/protobuf/spark/connect/expressions.proto
index 67a5f8866a9..37a8778865d 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/expressions.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/expressions.proto
@@ -24,6 +24,7 @@ package spark.connect;
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // Expression used to refer to fields, functions and similar. This can be used everywhere
 // expressions in SQL appear.
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/relations.proto b/connector/connect/common/src/main/protobuf/spark/connect/relations.proto
index 68133f509f3..6347bd7bc56 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/relations.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/relations.proto
@@ -26,6 +26,7 @@ import "spark/connect/catalog.proto";
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // The main [[Relation]] type. Fundamentally, a relation is a typed container
 // that has exactly one explicit relation type set.
diff --git a/connector/connect/common/src/main/protobuf/spark/connect/types.proto b/connector/connect/common/src/main/protobuf/spark/connect/types.proto
index 68833b5d220..43552381d28 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/types.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/types.proto
@@ -21,6 +21,7 @@ package spark.connect;
 
 option java_multiple_files = true;
 option java_package = "org.apache.spark.connect.proto";
+option go_package = "internal/generated";
 
 // This message describes the logical [[DataType]] of something. It does not carry the value
 // itself but only describes it.
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 55ee830bfd4..160109b9569 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -38,7 +38,7 @@ from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17 [...]
+    b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17 [...]
 )
@@ -787,7 +787,9 @@ _SPARKCONNECTSERVICE = DESCRIPTOR.services_by_name["SparkConnectService"]
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._options = None
     _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY._serialized_options = b"8\001"
     _ARTIFACTSTATUSESRESPONSE_STATUSESENTRY._options = None
diff --git a/python/pyspark/sql/connect/proto/catalog_pb2.py b/python/pyspark/sql/connect/proto/catalog_pb2.py
index adfdb4ec0c9..76c29fedc0c 100644
--- a/python/pyspark/sql/connect/proto/catalog_pb2.py
+++ b/python/pyspark/sql/connect/proto/catalog_pb2.py
@@ -34,7 +34,7 @@ from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1bspark/connect/catalog.proto\x12\rspark.connect\x1a\x1aspark/connect/common.proto\x1a\x19spark/connect/types.proto"\xc6\x0e\n\x07\x43\x61talog\x12K\n\x10\x63urrent_database\x18\x01 \x01(\x0b\x32\x1e.spark.connect.CurrentDatabaseH\x00R\x0f\x63urrentDatabase\x12U\n\x14set_current_database\x18\x02 \x01(\x0b\x32!.spark.connect.SetCurrentDatabaseH\x00R\x12setCurrentDatabase\x12\x45\n\x0elist_databases\x18\x03 \x01(\x0b\x32\x1c.spark.connect.ListDatabasesH\x00R\rlistDatabases\x12<\n [...]
+    b'\n\x1bspark/connect/catalog.proto\x12\rspark.connect\x1a\x1aspark/connect/common.proto\x1a\x19spark/connect/types.proto"\xc6\x0e\n\x07\x43\x61talog\x12K\n\x10\x63urrent_database\x18\x01 \x01(\x0b\x32\x1e.spark.connect.CurrentDatabaseH\x00R\x0f\x63urrentDatabase\x12U\n\x14set_current_database\x18\x02 \x01(\x0b\x32!.spark.connect.SetCurrentDatabaseH\x00R\x12setCurrentDatabase\x12\x45\n\x0elist_databases\x18\x03 \x01(\x0b\x32\x1c.spark.connect.ListDatabasesH\x00R\rlistDatabases\x12<\n [...]
 )
@@ -387,7 +387,9 @@ _sym_db.RegisterMessage(ListCatalogs)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _CREATEEXTERNALTABLE_OPTIONSENTRY._options = None
     _CREATEEXTERNALTABLE_OPTIONSENTRY._serialized_options = b"8\001"
     _CREATETABLE_OPTIONSENTRY._options = None
diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py
index 213065650ae..9bc86d25f21 100644
--- a/python/pyspark/sql/connect/proto/commands_pb2.py
+++ b/python/pyspark/sql/connect/proto/commands_pb2.py
@@ -36,7 +36,7 @@ from pyspark.sql.connect.proto import relations_pb2 as spark_dot_connect_dot_rel
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto"\x86\x07\n\x07\x43ommand\x12]\n\x11register_function\x18\x01 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x10registerFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x [...]
+    b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto"\x86\x07\n\x07\x43ommand\x12]\n\x11register_function\x18\x01 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x10registerFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x [...]
 )
@@ -469,7 +469,9 @@ _sym_db.RegisterMessage(GetResourcesCommandResult.ResourcesEntry)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _SQLCOMMAND_ARGSENTRY._options = None
     _SQLCOMMAND_ARGSENTRY._serialized_options = b"8\001"
     _WRITEOPERATION_OPTIONSENTRY._options = None
diff --git a/python/pyspark/sql/connect/proto/common_pb2.py b/python/pyspark/sql/connect/proto/common_pb2.py
index d4183c54cba..cde2354c853 100644
--- a/python/pyspark/sql/connect/proto/common_pb2.py
+++ b/python/pyspark/sql/connect/proto/common_pb2.py
@@ -30,7 +30,7 @@ _sym_db = _symbol_database.Default()
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddressesB"\n\ [...]
+    b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddressesB6\n\ [...]
 )
@@ -61,7 +61,9 @@ _sym_db.RegisterMessage(ResourceInformation)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _STORAGELEVEL._serialized_start = 46
     _STORAGELEVEL._serialized_end = 222
     _RESOURCEINFORMATION._serialized_start = 224
diff --git a/python/pyspark/sql/connect/proto/example_plugins_pb2.py b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
index 4223fc91a69..625b1b0899f 100644
--- a/python/pyspark/sql/connect/proto/example_plugins_pb2.py
+++ b/python/pyspark/sql/connect/proto/example_plugins_pb2.py
@@ -34,7 +34,7 @@ from pyspark.sql.connect.proto import expressions_pb2 as spark_dot_connect_dot_e
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n#spark/connect/example_plugins.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto"i\n\x15\x45xamplePluginRelation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"m\n\x17\x45xamplePluginExpression\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"9\n\x1 [...]
+    b'\n#spark/connect/example_plugins.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto"i\n\x15\x45xamplePluginRelation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"m\n\x17\x45xamplePluginExpression\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"9\n\x1 [...]
 )
@@ -77,7 +77,9 @@ _sym_db.RegisterMessage(ExamplePluginCommand)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _EXAMPLEPLUGINRELATION._serialized_start = 118
     _EXAMPLEPLUGINRELATION._serialized_end = 223
     _EXAMPLEPLUGINEXPRESSION._serialized_start = 225
diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py
index 6bcf11afe1f..cbff74321f3 100644
--- a/python/pyspark/sql/connect/proto/expressions_pb2.py
+++ b/python/pyspark/sql/connect/proto/expressions_pb2.py
@@ -34,7 +34,7 @@ from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x19spark/connect/types.proto"\x95+\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFunct [...]
+    b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x19spark/connect/types.proto"\x95+\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFunct [...]
 )
@@ -343,7 +343,9 @@ _sym_db.RegisterMessage(JavaUDF)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _EXPRESSION._serialized_start = 105
     _EXPRESSION._serialized_end = 5630
     _EXPRESSION_WINDOW._serialized_start = 1475
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py
index a61223caf12..7b1c55408be 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.py
+++ b/python/pyspark/sql/connect/proto/relations_pb2.py
@@ -36,7 +36,7 @@ from pyspark.sql.connect.proto import catalog_pb2 as spark_dot_connect_dot_catal
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1fspark/connect/expressions.proto\x1a\x19spark/connect/types.proto\x1a\x1bspark/connect/catalog.proto"\xf3\x16\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66il [...]
+    b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1fspark/connect/expressions.proto\x1a\x19spark/connect/types.proto\x1a\x1bspark/connect/catalog.proto"\xf3\x16\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66il [...]
 )
@@ -758,7 +758,9 @@ _sym_db.RegisterMessage(Parse.OptionsEntry)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _SQL_ARGSENTRY._options = None
     _SQL_ARGSENTRY._serialized_options = b"8\001"
     _READ_NAMEDTABLE_OPTIONSENTRY._options = None
diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py
index eec58d5cee6..d844d2da6fc 100644
--- a/python/pyspark/sql/connect/proto/types_pb2.py
+++ b/python/pyspark/sql/connect/proto/types_pb2.py
@@ -30,7 +30,7 @@ _sym_db = _symbol_database.Default()
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x19spark/connect/types.proto\x12\rspark.connect"\xc7 \n\x08\x44\x61taType\x12\x32\n\x04null\x18\x01 \x01(\x0b\x32\x1c.spark.connect.DataType.NULLH\x00R\x04null\x12\x38\n\x06\x62inary\x18\x02 \x01(\x0b\x32\x1e.spark.connect.DataType.BinaryH\x00R\x06\x62inary\x12;\n\x07\x62oolean\x18\x03 \x01(\x0b\x32\x1f.spark.connect.DataType.BooleanH\x00R\x07\x62oolean\x12\x32\n\x04\x62yte\x18\x04 \x01(\x0b\x32\x1c.spark.connect.DataType.ByteH\x00R\x04\x62yte\x12\x35\n\x05short\x18\x05 \x01(\x0 [...]
+    b"\n\x19spark/connect/types.proto\x12\rspark.connect\"\xc7 \n\x08\x44\x61taType\x12\x32\n\x04null\x18\x01 \x01(\x0b\x32\x1c.spark.connect.DataType.NULLH\x00R\x04null\x12\x38\n\x06\x62inary\x18\x02 \x01(\x0b\x32\x1e.spark.connect.DataType.BinaryH\x00R\x06\x62inary\x12;\n\x07\x62oolean\x18\x03 \x01(\x0b\x32\x1f.spark.connect.DataType.BooleanH\x00R\x07\x62oolean\x12\x32\n\x04\x62yte\x18\x04 \x01(\x0b\x32\x1c.spark.connect.DataType.ByteH\x00R\x04\x62yte\x12\x35\n\x05short\x18\x05 \x01(\x [...]
 )
@@ -324,7 +324,9 @@ _sym_db.RegisterMessage(DataType.Unparsed)
 
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
-    DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001"
+    DESCRIPTOR._serialized_options = (
+        b"\n\036org.apache.spark.connect.protoP\001Z\022internal/generated"
+    )
     _DATATYPE._serialized_start = 45
     _DATATYPE._serialized_end = 4212
     _DATATYPE_BOOLEAN._serialized_start = 1534
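
A side note on the regenerated Python descriptors above: the bytes appended to each `_serialized_options` value are the wire encoding of `FileOptions.go_package`. A minimal sketch decoding them, assuming only standard protobuf wire-format rules (nothing here is Spark-specific):

```go
package main

import "fmt"

func main() {
	// Suffix appended to every _serialized_options value in this commit.
	extra := []byte("Z\x12internal/generated")
	tag := extra[0]
	fmt.Println(tag >> 3)   // 11: field number of FileOptions.go_package
	fmt.Println(tag & 0x07) // 2: length-delimited wire type
	n := int(extra[1])      // 18: payload length
	fmt.Println(string(extra[2 : 2+n])) // "internal/generated"
}
```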
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]