This is an automated email from the ASF dual-hosted git repository. ruifengz pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 39e08b10c24 [SPARK-40557][CONNECT][FOLLOW-UP] Sync python generated proto files 39e08b10c24 is described below commit 39e08b10c246ca1d47ed6adb8992802bd1113657 Author: Rui Wang <rui.w...@databricks.com> AuthorDate: Fri Oct 14 12:03:11 2022 +0800 [SPARK-40557][CONNECT][FOLLOW-UP] Sync python generated proto files ### What changes were proposed in this pull request? This PR syncs python generated proto files. The proto changes in this file are generated by https://github.com/apache/spark/blob/master/connector/connect/dev/generate_protos.sh. ### Why are the changes needed? Python client side proto files are out of sync. Other python-related PRs need to re-generate proto files, which has caused trouble during code review. We are looking for ways to automatically keep the python proto files in sync. Before that is done, we need to manually update the proto files. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? UT Closes #38244 from amaliujia/sync_python_proto. 
Authored-by: Rui Wang <rui.w...@databricks.com> Signed-off-by: Ruifeng Zheng <ruife...@apache.org> --- python/pyspark/sql/connect/plan.py | 4 +- python/pyspark/sql/connect/proto/commands_pb2.py | 8 +- python/pyspark/sql/connect/proto/commands_pb2.pyi | 8 +- .../pyspark/sql/connect/proto/expressions_pb2.py | 62 +++--- .../pyspark/sql/connect/proto/expressions_pb2.pyi | 90 +++++++- python/pyspark/sql/connect/proto/relations_pb2.py | 82 ++++---- python/pyspark/sql/connect/proto/relations_pb2.pyi | 164 ++++++++------- python/pyspark/sql/connect/proto/types_pb2.py | 102 ++++----- python/pyspark/sql/connect/proto/types_pb2.pyi | 232 +++++++++++---------- 9 files changed, 424 insertions(+), 328 deletions(-) diff --git a/python/pyspark/sql/connect/plan.py b/python/pyspark/sql/connect/plan.py index 09f6680a416..67ed6b964fa 100644 --- a/python/pyspark/sql/connect/plan.py +++ b/python/pyspark/sql/connect/plan.py @@ -319,9 +319,9 @@ class Aggregate(LogicalPlan): def _convert_measure( self, m: MeasureType, session: Optional["RemoteSparkSession"] - ) -> proto.Aggregate.Measure: + ) -> proto.Aggregate.AggregateFunction: exp, fun = m - measure = proto.Aggregate.Measure() + measure = proto.Aggregate.AggregateFunction() measure.function.name = fun if type(exp) is str: measure.function.arguments.append(self.unresolved_attr(exp)) diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py index 46d405dd008..875f5d02db2 100644 --- a/python/pyspark/sql/connect/proto/commands_pb2.py +++ b/python/pyspark/sql/connect/proto/commands_pb2.py @@ -32,7 +32,7 @@ from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19spark/connect/types.proto"i\n\x07\x43ommand\x12N\n\x0f\x63reate_function\x18\x01 
\x01(\x0b\x32#.spark.connect.CreateScalarFunctionH\x00R\x0e\x63reateFunctionB\x0e\n\x0c\x63ommand_type"\x8f\x04\n\x14\x43reateScalarFunction\x12\x14\n\x05parts\x18\x01 \x03(\tR\x05parts\x12P\n\x08language\x18\x02 \x01(\x0e\x32\x34.spark.connect.CreateScalarFunction.FunctionLanguageR\x08language\x12\x1c\n\ttemporary\x18\x03 \x01(\x08R\ttempora [...] + b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19spark/connect/types.proto"i\n\x07\x43ommand\x12N\n\x0f\x63reate_function\x18\x01 \x01(\x0b\x32#.spark.connect.CreateScalarFunctionH\x00R\x0e\x63reateFunctionB\x0e\n\x0c\x63ommand_type"\x97\x04\n\x14\x43reateScalarFunction\x12\x14\n\x05parts\x18\x01 \x03(\tR\x05parts\x12P\n\x08language\x18\x02 \x01(\x0e\x32\x34.spark.connect.CreateScalarFunction.FunctionLanguageR\x08language\x12\x1c\n\ttemporary\x18\x03 \x01(\x08R\ttempora [...] ) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) @@ -44,7 +44,7 @@ if _descriptor._USE_C_DESCRIPTORS == False: _COMMAND._serialized_start = 74 _COMMAND._serialized_end = 179 _CREATESCALARFUNCTION._serialized_start = 182 - _CREATESCALARFUNCTION._serialized_end = 709 - _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_start = 547 - _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_end = 686 + _CREATESCALARFUNCTION._serialized_end = 717 + _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_start = 555 + _CREATESCALARFUNCTION_FUNCTIONLANGUAGE._serialized_end = 694 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/commands_pb2.pyi b/python/pyspark/sql/connect/proto/commands_pb2.pyi index 0973f08e6a0..6ba088061e7 100644 --- a/python/pyspark/sql/connect/proto/commands_pb2.pyi +++ b/python/pyspark/sql/connect/proto/commands_pb2.pyi @@ -135,10 +135,10 @@ class CreateScalarFunction(google.protobuf.message.Message): def argument_types( self, ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - pyspark.sql.connect.proto.types_pb2.Type + 
pyspark.sql.connect.proto.types_pb2.DataType ]: ... @property - def return_type(self) -> pyspark.sql.connect.proto.types_pb2.Type: ... + def return_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ... serialized_function: builtins.bytes """As a raw string serialized:""" literal_string: builtins.str @@ -149,9 +149,9 @@ class CreateScalarFunction(google.protobuf.message.Message): parts: collections.abc.Iterable[builtins.str] | None = ..., language: global___CreateScalarFunction.FunctionLanguage.ValueType = ..., temporary: builtins.bool = ..., - argument_types: collections.abc.Iterable[pyspark.sql.connect.proto.types_pb2.Type] + argument_types: collections.abc.Iterable[pyspark.sql.connect.proto.types_pb2.DataType] | None = ..., - return_type: pyspark.sql.connect.proto.types_pb2.Type | None = ..., + return_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., serialized_function: builtins.bytes = ..., literal_string: builtins.str = ..., ) -> None: ... diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py index 68a485f0a8c..84aa22d8bde 100644 --- a/python/pyspark/sql/connect/proto/expressions_pb2.py +++ b/python/pyspark/sql/connect/proto/expressions_pb2.py @@ -33,7 +33,7 @@ from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19spark/connect/types.proto\x1a\x19google/protobuf/any.proto"\xd8\x14\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFu [...] 
+ b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19spark/connect/types.proto\x1a\x19google/protobuf/any.proto"\xa7\x17\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFu [...] ) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) @@ -43,31 +43,37 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001" _EXPRESSION._serialized_start = 105 - _EXPRESSION._serialized_end = 2753 - _EXPRESSION_LITERAL._serialized_start = 471 - _EXPRESSION_LITERAL._serialized_end = 2542 - _EXPRESSION_LITERAL_VARCHAR._serialized_start = 1769 - _EXPRESSION_LITERAL_VARCHAR._serialized_end = 1824 - _EXPRESSION_LITERAL_DECIMAL._serialized_start = 1826 - _EXPRESSION_LITERAL_DECIMAL._serialized_end = 1909 - _EXPRESSION_LITERAL_MAP._serialized_start = 1912 - _EXPRESSION_LITERAL_MAP._serialized_end = 2118 - _EXPRESSION_LITERAL_MAP_KEYVALUE._serialized_start = 1998 - _EXPRESSION_LITERAL_MAP_KEYVALUE._serialized_end = 2118 - _EXPRESSION_LITERAL_INTERVALYEARTOMONTH._serialized_start = 2120 - _EXPRESSION_LITERAL_INTERVALYEARTOMONTH._serialized_end = 2187 - _EXPRESSION_LITERAL_INTERVALDAYTOSECOND._serialized_start = 2189 - _EXPRESSION_LITERAL_INTERVALDAYTOSECOND._serialized_end = 2292 - _EXPRESSION_LITERAL_STRUCT._serialized_start = 2294 - _EXPRESSION_LITERAL_STRUCT._serialized_end = 2361 - _EXPRESSION_LITERAL_LIST._serialized_start = 2363 - _EXPRESSION_LITERAL_LIST._serialized_end = 2428 - _EXPRESSION_LITERAL_USERDEFINED._serialized_start = 2430 - _EXPRESSION_LITERAL_USERDEFINED._serialized_end = 2526 - _EXPRESSION_UNRESOLVEDATTRIBUTE._serialized_start = 2544 - 
_EXPRESSION_UNRESOLVEDATTRIBUTE._serialized_end = 2587 - _EXPRESSION_UNRESOLVEDFUNCTION._serialized_start = 2589 - _EXPRESSION_UNRESOLVEDFUNCTION._serialized_end = 2688 - _EXPRESSION_EXPRESSIONSTRING._serialized_start = 2690 - _EXPRESSION_EXPRESSIONSTRING._serialized_end = 2740 + _EXPRESSION._serialized_end = 3088 + _EXPRESSION_LITERAL._serialized_start = 613 + _EXPRESSION_LITERAL._serialized_end = 2696 + _EXPRESSION_LITERAL_VARCHAR._serialized_start = 1923 + _EXPRESSION_LITERAL_VARCHAR._serialized_end = 1978 + _EXPRESSION_LITERAL_DECIMAL._serialized_start = 1980 + _EXPRESSION_LITERAL_DECIMAL._serialized_end = 2063 + _EXPRESSION_LITERAL_MAP._serialized_start = 2066 + _EXPRESSION_LITERAL_MAP._serialized_end = 2272 + _EXPRESSION_LITERAL_MAP_KEYVALUE._serialized_start = 2152 + _EXPRESSION_LITERAL_MAP_KEYVALUE._serialized_end = 2272 + _EXPRESSION_LITERAL_INTERVALYEARTOMONTH._serialized_start = 2274 + _EXPRESSION_LITERAL_INTERVALYEARTOMONTH._serialized_end = 2341 + _EXPRESSION_LITERAL_INTERVALDAYTOSECOND._serialized_start = 2343 + _EXPRESSION_LITERAL_INTERVALDAYTOSECOND._serialized_end = 2446 + _EXPRESSION_LITERAL_STRUCT._serialized_start = 2448 + _EXPRESSION_LITERAL_STRUCT._serialized_end = 2515 + _EXPRESSION_LITERAL_LIST._serialized_start = 2517 + _EXPRESSION_LITERAL_LIST._serialized_end = 2582 + _EXPRESSION_LITERAL_USERDEFINED._serialized_start = 2584 + _EXPRESSION_LITERAL_USERDEFINED._serialized_end = 2680 + _EXPRESSION_UNRESOLVEDATTRIBUTE._serialized_start = 2698 + _EXPRESSION_UNRESOLVEDATTRIBUTE._serialized_end = 2741 + _EXPRESSION_UNRESOLVEDFUNCTION._serialized_start = 2743 + _EXPRESSION_UNRESOLVEDFUNCTION._serialized_end = 2842 + _EXPRESSION_EXPRESSIONSTRING._serialized_start = 2844 + _EXPRESSION_EXPRESSIONSTRING._serialized_end = 2894 + _EXPRESSION_UNRESOLVEDSTAR._serialized_start = 2896 + _EXPRESSION_UNRESOLVEDSTAR._serialized_end = 2912 + _EXPRESSION_QUALIFIEDATTRIBUTE._serialized_start = 2914 + _EXPRESSION_QUALIFIEDATTRIBUTE._serialized_end = 2999 + 
_EXPRESSION_ALIAS._serialized_start = 3001 + _EXPRESSION_ALIAS._serialized_end = 3075 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.pyi b/python/pyspark/sql/connect/proto/expressions_pb2.pyi index 4b09eff9981..534c427e36b 100644 --- a/python/pyspark/sql/connect/proto/expressions_pb2.pyi +++ b/python/pyspark/sql/connect/proto/expressions_pb2.pyi @@ -318,14 +318,14 @@ class Expression(google.protobuf.message.Message): """Timestamp in units of microseconds since the UNIX epoch.""" uuid: builtins.bytes @property - def null(self) -> pyspark.sql.connect.proto.types_pb2.Type: + def null(self) -> pyspark.sql.connect.proto.types_pb2.DataType: """a typed null literal""" @property def list(self) -> global___Expression.Literal.List: ... @property - def empty_list(self) -> pyspark.sql.connect.proto.types_pb2.Type.List: ... + def empty_list(self) -> pyspark.sql.connect.proto.types_pb2.DataType.List: ... @property - def empty_map(self) -> pyspark.sql.connect.proto.types_pb2.Type.Map: ... + def empty_map(self) -> pyspark.sql.connect.proto.types_pb2.DataType.Map: ... @property def user_defined(self) -> global___Expression.Literal.UserDefined: ... 
nullable: builtins.bool @@ -363,10 +363,10 @@ class Expression(google.protobuf.message.Message): map: global___Expression.Literal.Map | None = ..., timestamp_tz: builtins.int = ..., uuid: builtins.bytes = ..., - null: pyspark.sql.connect.proto.types_pb2.Type | None = ..., + null: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., list: global___Expression.Literal.List | None = ..., - empty_list: pyspark.sql.connect.proto.types_pb2.Type.List | None = ..., - empty_map: pyspark.sql.connect.proto.types_pb2.Type.Map | None = ..., + empty_list: pyspark.sql.connect.proto.types_pb2.DataType.List | None = ..., + empty_map: pyspark.sql.connect.proto.types_pb2.DataType.Map | None = ..., user_defined: global___Expression.Literal.UserDefined | None = ..., nullable: builtins.bool = ..., type_variation_reference: builtins.int = ..., @@ -594,10 +594,67 @@ class Expression(google.protobuf.message.Message): self, field_name: typing_extensions.Literal["expression", b"expression"] ) -> None: ... + class UnresolvedStar(google.protobuf.message.Message): + """UnresolvedStar is used to expand all the fields of a relation or struct.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + class QualifiedAttribute(google.protobuf.message.Message): + """An qualified attribute that can specify a reference (e.g. column) without needing a resolution + by the analyzer. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ... + def __init__( + self, + *, + name: builtins.str = ..., + type: pyspark.sql.connect.proto.types_pb2.DataType | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["type", b"type"] + ) -> builtins.bool: ... 
+ def ClearField( + self, field_name: typing_extensions.Literal["name", b"name", "type", b"type"] + ) -> None: ... + + class Alias(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPR_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + @property + def expr(self) -> global___Expression: ... + name: builtins.str + def __init__( + self, + *, + expr: global___Expression | None = ..., + name: builtins.str = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["expr", b"expr"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing_extensions.Literal["expr", b"expr", "name", b"name"] + ) -> None: ... + LITERAL_FIELD_NUMBER: builtins.int UNRESOLVED_ATTRIBUTE_FIELD_NUMBER: builtins.int UNRESOLVED_FUNCTION_FIELD_NUMBER: builtins.int EXPRESSION_STRING_FIELD_NUMBER: builtins.int + UNRESOLVED_STAR_FIELD_NUMBER: builtins.int + ALIAS_FIELD_NUMBER: builtins.int @property def literal(self) -> global___Expression.Literal: ... @property @@ -606,6 +663,10 @@ class Expression(google.protobuf.message.Message): def unresolved_function(self) -> global___Expression.UnresolvedFunction: ... @property def expression_string(self) -> global___Expression.ExpressionString: ... + @property + def unresolved_star(self) -> global___Expression.UnresolvedStar: ... + @property + def alias(self) -> global___Expression.Alias: ... def __init__( self, *, @@ -613,10 +674,14 @@ class Expression(google.protobuf.message.Message): unresolved_attribute: global___Expression.UnresolvedAttribute | None = ..., unresolved_function: global___Expression.UnresolvedFunction | None = ..., expression_string: global___Expression.ExpressionString | None = ..., + unresolved_star: global___Expression.UnresolvedStar | None = ..., + alias: global___Expression.Alias | None = ..., ) -> None: ... 
def HasField( self, field_name: typing_extensions.Literal[ + "alias", + b"alias", "expr_type", b"expr_type", "expression_string", @@ -627,11 +692,15 @@ class Expression(google.protobuf.message.Message): b"unresolved_attribute", "unresolved_function", b"unresolved_function", + "unresolved_star", + b"unresolved_star", ], ) -> builtins.bool: ... def ClearField( self, field_name: typing_extensions.Literal[ + "alias", + b"alias", "expr_type", b"expr_type", "expression_string", @@ -642,12 +711,19 @@ class Expression(google.protobuf.message.Message): b"unresolved_attribute", "unresolved_function", b"unresolved_function", + "unresolved_star", + b"unresolved_star", ], ) -> None: ... def WhichOneof( self, oneof_group: typing_extensions.Literal["expr_type", b"expr_type"] ) -> typing_extensions.Literal[ - "literal", "unresolved_attribute", "unresolved_function", "expression_string" + "literal", + "unresolved_attribute", + "unresolved_function", + "expression_string", + "unresolved_star", + "alias", ] | None: ... global___Expression = Expression diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py index 3bfb8ddfb4f..1280236a150 100644 --- a/python/pyspark/sql/connect/proto/relations_pb2.py +++ b/python/pyspark/sql/connect/proto/relations_pb2.py @@ -32,7 +32,7 @@ from pyspark.sql.connect.proto import expressions_pb2 as spark_dot_connect_dot_e DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xa6\x04\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0 [...] 
+ b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xed\x04\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0 [...] ) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) @@ -42,45 +42,43 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001" _RELATION._serialized_start = 82 - _RELATION._serialized_end = 632 - _UNKNOWN._serialized_start = 634 - _UNKNOWN._serialized_end = 643 - _RELATIONCOMMON._serialized_start = 645 - _RELATIONCOMMON._serialized_end = 716 - _SQL._serialized_start = 718 - _SQL._serialized_end = 745 - _READ._serialized_start = 747 - _READ._serialized_end = 869 - _READ_NAMEDTABLE._serialized_start = 822 - _READ_NAMEDTABLE._serialized_end = 856 - _PROJECT._serialized_start = 871 - _PROJECT._serialized_end = 988 - _FILTER._serialized_start = 990 - _FILTER._serialized_end = 1102 - _JOIN._serialized_start = 1105 - _JOIN._serialized_end = 1449 - _JOIN_JOINTYPE._serialized_start = 1297 - _JOIN_JOINTYPE._serialized_end = 1449 - _UNION._serialized_start = 1452 - _UNION._serialized_end = 1657 - _UNION_UNIONTYPE._serialized_start = 1573 - _UNION_UNIONTYPE._serialized_end = 1657 - _FETCH._serialized_start = 1659 - _FETCH._serialized_end = 1759 - _AGGREGATE._serialized_start = 1762 - _AGGREGATE._serialized_end = 2285 - _AGGREGATE_GROUPINGSET._serialized_start = 1959 - _AGGREGATE_GROUPINGSET._serialized_end = 2052 - _AGGREGATE_MEASURE._serialized_start = 2055 - _AGGREGATE_MEASURE._serialized_end = 2187 - _AGGREGATE_AGGREGATEFUNCTION._serialized_start = 2189 - 
_AGGREGATE_AGGREGATEFUNCTION._serialized_end = 2285 - _SORT._serialized_start = 2288 - _SORT._serialized_end = 2790 - _SORT_SORTFIELD._serialized_start = 2408 - _SORT_SORTFIELD._serialized_end = 2596 - _SORT_SORTDIRECTION._serialized_start = 2598 - _SORT_SORTDIRECTION._serialized_end = 2706 - _SORT_SORTNULLS._serialized_start = 2708 - _SORT_SORTNULLS._serialized_end = 2790 + _RELATION._serialized_end = 703 + _UNKNOWN._serialized_start = 705 + _UNKNOWN._serialized_end = 714 + _RELATIONCOMMON._serialized_start = 716 + _RELATIONCOMMON._serialized_end = 787 + _SQL._serialized_start = 789 + _SQL._serialized_end = 816 + _READ._serialized_start = 818 + _READ._serialized_end = 940 + _READ_NAMEDTABLE._serialized_start = 893 + _READ_NAMEDTABLE._serialized_end = 927 + _PROJECT._serialized_start = 942 + _PROJECT._serialized_end = 1059 + _FILTER._serialized_start = 1061 + _FILTER._serialized_end = 1173 + _JOIN._serialized_start = 1176 + _JOIN._serialized_end = 1589 + _JOIN_JOINTYPE._serialized_start = 1402 + _JOIN_JOINTYPE._serialized_end = 1589 + _UNION._serialized_start = 1592 + _UNION._serialized_end = 1797 + _UNION_UNIONTYPE._serialized_start = 1713 + _UNION_UNIONTYPE._serialized_end = 1797 + _FETCH._serialized_start = 1799 + _FETCH._serialized_end = 1899 + _AGGREGATE._serialized_start = 1902 + _AGGREGATE._serialized_end = 2227 + _AGGREGATE_AGGREGATEFUNCTION._serialized_start = 2131 + _AGGREGATE_AGGREGATEFUNCTION._serialized_end = 2227 + _SORT._serialized_start = 2230 + _SORT._serialized_end = 2732 + _SORT_SORTFIELD._serialized_start = 2350 + _SORT_SORTFIELD._serialized_end = 2538 + _SORT_SORTDIRECTION._serialized_start = 2540 + _SORT_SORTDIRECTION._serialized_end = 2648 + _SORT_SORTNULLS._serialized_start = 2650 + _SORT_SORTNULLS._serialized_end = 2732 + _LOCALRELATION._serialized_start = 2734 + _LOCALRELATION._serialized_end = 2827 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/relations_pb2.pyi 
b/python/pyspark/sql/connect/proto/relations_pb2.pyi index eef36f1d60f..952f476c912 100644 --- a/python/pyspark/sql/connect/proto/relations_pb2.pyi +++ b/python/pyspark/sql/connect/proto/relations_pb2.pyi @@ -69,6 +69,7 @@ class Relation(google.protobuf.message.Message): FETCH_FIELD_NUMBER: builtins.int AGGREGATE_FIELD_NUMBER: builtins.int SQL_FIELD_NUMBER: builtins.int + LOCAL_RELATION_FIELD_NUMBER: builtins.int UNKNOWN_FIELD_NUMBER: builtins.int @property def common(self) -> global___RelationCommon: ... @@ -91,6 +92,8 @@ class Relation(google.protobuf.message.Message): @property def sql(self) -> global___SQL: ... @property + def local_relation(self) -> global___LocalRelation: ... + @property def unknown(self) -> global___Unknown: ... def __init__( self, @@ -105,6 +108,7 @@ class Relation(google.protobuf.message.Message): fetch: global___Fetch | None = ..., aggregate: global___Aggregate | None = ..., sql: global___SQL | None = ..., + local_relation: global___LocalRelation | None = ..., unknown: global___Unknown | None = ..., ) -> None: ... def HasField( @@ -120,6 +124,8 @@ class Relation(google.protobuf.message.Message): b"filter", "join", b"join", + "local_relation", + b"local_relation", "project", b"project", "read", @@ -149,6 +155,8 @@ class Relation(google.protobuf.message.Message): b"filter", "join", b"join", + "local_relation", + b"local_relation", "project", b"project", "read", @@ -168,7 +176,17 @@ class Relation(google.protobuf.message.Message): def WhichOneof( self, oneof_group: typing_extensions.Literal["rel_type", b"rel_type"] ) -> typing_extensions.Literal[ - "read", "project", "filter", "join", "union", "sort", "fetch", "aggregate", "sql", "unknown" + "read", + "project", + "filter", + "join", + "union", + "sort", + "fetch", + "aggregate", + "sql", + "local_relation", + "unknown", ] | None: ... 
global___Relation = Relation @@ -353,45 +371,57 @@ class Join(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor JOIN_TYPE_UNSPECIFIED: Join._JoinType.ValueType # 0 JOIN_TYPE_INNER: Join._JoinType.ValueType # 1 - JOIN_TYPE_OUTER: Join._JoinType.ValueType # 2 + JOIN_TYPE_FULL_OUTER: Join._JoinType.ValueType # 2 JOIN_TYPE_LEFT_OUTER: Join._JoinType.ValueType # 3 JOIN_TYPE_RIGHT_OUTER: Join._JoinType.ValueType # 4 - JOIN_TYPE_ANTI: Join._JoinType.ValueType # 5 + JOIN_TYPE_LEFT_ANTI: Join._JoinType.ValueType # 5 + JOIN_TYPE_LEFT_SEMI: Join._JoinType.ValueType # 6 class JoinType(_JoinType, metaclass=_JoinTypeEnumTypeWrapper): ... JOIN_TYPE_UNSPECIFIED: Join.JoinType.ValueType # 0 JOIN_TYPE_INNER: Join.JoinType.ValueType # 1 - JOIN_TYPE_OUTER: Join.JoinType.ValueType # 2 + JOIN_TYPE_FULL_OUTER: Join.JoinType.ValueType # 2 JOIN_TYPE_LEFT_OUTER: Join.JoinType.ValueType # 3 JOIN_TYPE_RIGHT_OUTER: Join.JoinType.ValueType # 4 - JOIN_TYPE_ANTI: Join.JoinType.ValueType # 5 + JOIN_TYPE_LEFT_ANTI: Join.JoinType.ValueType # 5 + JOIN_TYPE_LEFT_SEMI: Join.JoinType.ValueType # 6 LEFT_FIELD_NUMBER: builtins.int RIGHT_FIELD_NUMBER: builtins.int - ON_FIELD_NUMBER: builtins.int - HOW_FIELD_NUMBER: builtins.int + JOIN_CONDITION_FIELD_NUMBER: builtins.int + JOIN_TYPE_FIELD_NUMBER: builtins.int @property def left(self) -> global___Relation: ... @property def right(self) -> global___Relation: ... @property - def on(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression: ... - how: global___Join.JoinType.ValueType + def join_condition(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression: ... 
+ join_type: global___Join.JoinType.ValueType def __init__( self, *, left: global___Relation | None = ..., right: global___Relation | None = ..., - on: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ..., - how: global___Join.JoinType.ValueType = ..., + join_condition: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ..., + join_type: global___Join.JoinType.ValueType = ..., ) -> None: ... def HasField( - self, field_name: typing_extensions.Literal["left", b"left", "on", b"on", "right", b"right"] + self, + field_name: typing_extensions.Literal[ + "join_condition", b"join_condition", "left", b"left", "right", b"right" + ], ) -> builtins.bool: ... def ClearField( self, field_name: typing_extensions.Literal[ - "how", b"how", "left", b"left", "on", b"on", "right", b"right" + "join_condition", + b"join_condition", + "join_type", + b"join_type", + "left", + b"left", + "right", + b"right", ], ) -> None: ... @@ -476,56 +506,6 @@ class Aggregate(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - class GroupingSet(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - AGGREGATE_EXPRESSIONS_FIELD_NUMBER: builtins.int - @property - def aggregate_expressions( - self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - pyspark.sql.connect.proto.expressions_pb2.Expression - ]: ... - def __init__( - self, - *, - aggregate_expressions: collections.abc.Iterable[ - pyspark.sql.connect.proto.expressions_pb2.Expression - ] - | None = ..., - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "aggregate_expressions", b"aggregate_expressions" - ], - ) -> None: ... - - class Measure(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - FUNCTION_FIELD_NUMBER: builtins.int - FILTER_FIELD_NUMBER: builtins.int - @property - def function(self) -> global___Aggregate.AggregateFunction: ... 
- @property - def filter(self) -> pyspark.sql.connect.proto.expressions_pb2.Expression: - """Conditional filter for SUM(x FILTER WHERE x < 10)""" - def __init__( - self, - *, - function: global___Aggregate.AggregateFunction | None = ..., - filter: pyspark.sql.connect.proto.expressions_pb2.Expression | None = ..., - ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal["filter", b"filter", "function", b"function"], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal["filter", b"filter", "function", b"function"], - ) -> None: ... - class AggregateFunction(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -552,30 +532,32 @@ class Aggregate(google.protobuf.message.Message): ) -> None: ... INPUT_FIELD_NUMBER: builtins.int - GROUPING_SETS_FIELD_NUMBER: builtins.int - MEASURES_FIELD_NUMBER: builtins.int + GROUPING_EXPRESSIONS_FIELD_NUMBER: builtins.int + RESULT_EXPRESSIONS_FIELD_NUMBER: builtins.int @property def input(self) -> global___Relation: ... @property - def grouping_sets( + def grouping_expressions( self, ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Aggregate.GroupingSet - ]: - """Grouping sets are used in rollups""" + pyspark.sql.connect.proto.expressions_pb2.Expression + ]: ... @property - def measures( + def result_expressions( self, ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ - global___Aggregate.Measure - ]: - """Measures""" + global___Aggregate.AggregateFunction + ]: ... 
def __init__( self, *, input: global___Relation | None = ..., - grouping_sets: collections.abc.Iterable[global___Aggregate.GroupingSet] | None = ..., - measures: collections.abc.Iterable[global___Aggregate.Measure] | None = ..., + grouping_expressions: collections.abc.Iterable[ + pyspark.sql.connect.proto.expressions_pb2.Expression + ] + | None = ..., + result_expressions: collections.abc.Iterable[global___Aggregate.AggregateFunction] + | None = ..., ) -> None: ... def HasField( self, field_name: typing_extensions.Literal["input", b"input"] @@ -583,7 +565,12 @@ class Aggregate(google.protobuf.message.Message): def ClearField( self, field_name: typing_extensions.Literal[ - "grouping_sets", b"grouping_sets", "input", b"input", "measures", b"measures" + "grouping_expressions", + b"grouping_expressions", + "input", + b"input", + "result_expressions", + b"result_expressions", ], ) -> None: ... @@ -682,3 +669,28 @@ class Sort(google.protobuf.message.Message): ) -> None: ... global___Sort = Sort + +class LocalRelation(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ATTRIBUTES_FIELD_NUMBER: builtins.int + @property + def attributes( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + pyspark.sql.connect.proto.expressions_pb2.Expression.QualifiedAttribute + ]: + """TODO: support local data.""" + def __init__( + self, + *, + attributes: collections.abc.Iterable[ + pyspark.sql.connect.proto.expressions_pb2.Expression.QualifiedAttribute + ] + | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["attributes", b"attributes"] + ) -> None: ... 
+ +global___LocalRelation = LocalRelation diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py index dcaf641b855..dedc6b4b578 100644 --- a/python/pyspark/sql/connect/proto/types_pb2.py +++ b/python/pyspark/sql/connect/proto/types_pb2.py @@ -29,7 +29,7 @@ _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x19spark/connect/types.proto\x12\rspark.connect"\xea%\n\x04Type\x12\x31\n\x04\x62ool\x18\x01 \x01(\x0b\x32\x1b.spark.connect.Type.BooleanH\x00R\x04\x62ool\x12(\n\x02i8\x18\x02 \x01(\x0b\x32\x16.spark.connect.Type.I8H\x00R\x02i8\x12+\n\x03i16\x18\x03 \x01(\x0b\x32\x17.spark.connect.Type.I16H\x00R\x03i16\x12+\n\x03i32\x18\x05 \x01(\x0b\x32\x17.spark.connect.Type.I32H\x00R\x03i32\x12+\n\x03i64\x18\x07 \x01(\x0b\x32\x17.spark.connect.Type.I64H\x00R\x03i64\x12.\n\x04\x66p32\x18\n \x0 [...] + b'\n\x19spark/connect/types.proto\x12\rspark.connect"\xbe\'\n\x08\x44\x61taType\x12\x35\n\x04\x62ool\x18\x01 \x01(\x0b\x32\x1f.spark.connect.DataType.BooleanH\x00R\x04\x62ool\x12,\n\x02i8\x18\x02 \x01(\x0b\x32\x1a.spark.connect.DataType.I8H\x00R\x02i8\x12/\n\x03i16\x18\x03 \x01(\x0b\x32\x1b.spark.connect.DataType.I16H\x00R\x03i16\x12/\n\x03i32\x18\x05 \x01(\x0b\x32\x1b.spark.connect.DataType.I32H\x00R\x03i32\x12/\n\x03i64\x18\x07 \x01(\x0b\x32\x1b.spark.connect.DataType.I64H\x00R\x03 [...] 
) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) @@ -38,54 +38,54 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b"\n\036org.apache.spark.connect.protoP\001" - _TYPE._serialized_start = 45 - _TYPE._serialized_end = 4887 - _TYPE_BOOLEAN._serialized_start = 1366 - _TYPE_BOOLEAN._serialized_end = 1500 - _TYPE_I8._serialized_start = 1503 - _TYPE_I8._serialized_end = 1632 - _TYPE_I16._serialized_start = 1635 - _TYPE_I16._serialized_end = 1765 - _TYPE_I32._serialized_start = 1768 - _TYPE_I32._serialized_end = 1898 - _TYPE_I64._serialized_start = 1901 - _TYPE_I64._serialized_end = 2031 - _TYPE_FP32._serialized_start = 2034 - _TYPE_FP32._serialized_end = 2165 - _TYPE_FP64._serialized_start = 2168 - _TYPE_FP64._serialized_end = 2299 - _TYPE_STRING._serialized_start = 2302 - _TYPE_STRING._serialized_end = 2435 - _TYPE_BINARY._serialized_start = 2438 - _TYPE_BINARY._serialized_end = 2571 - _TYPE_TIMESTAMP._serialized_start = 2574 - _TYPE_TIMESTAMP._serialized_end = 2710 - _TYPE_DATE._serialized_start = 2713 - _TYPE_DATE._serialized_end = 2844 - _TYPE_TIME._serialized_start = 2847 - _TYPE_TIME._serialized_end = 2978 - _TYPE_TIMESTAMPTZ._serialized_start = 2981 - _TYPE_TIMESTAMPTZ._serialized_end = 3119 - _TYPE_INTERVALYEAR._serialized_start = 3122 - _TYPE_INTERVALYEAR._serialized_end = 3261 - _TYPE_INTERVALDAY._serialized_start = 3264 - _TYPE_INTERVALDAY._serialized_end = 3402 - _TYPE_UUID._serialized_start = 3405 - _TYPE_UUID._serialized_end = 3536 - _TYPE_FIXEDCHAR._serialized_start = 3539 - _TYPE_FIXEDCHAR._serialized_end = 3699 - _TYPE_VARCHAR._serialized_start = 3702 - _TYPE_VARCHAR._serialized_end = 3860 - _TYPE_FIXEDBINARY._serialized_start = 3863 - _TYPE_FIXEDBINARY._serialized_end = 4025 - _TYPE_DECIMAL._serialized_start = 4028 - _TYPE_DECIMAL._serialized_end = 4214 - _TYPE_STRUCT._serialized_start = 4217 - _TYPE_STRUCT._serialized_end = 4393 - _TYPE_LIST._serialized_start = 4396 - 
_TYPE_LIST._serialized_end = 4568 - _TYPE_MAP._serialized_start = 4571 - _TYPE_MAP._serialized_end = 4783 - _TYPE_NULLABILITY._serialized_start = 4785 - _TYPE_NULLABILITY._serialized_end = 4879 + _DATATYPE._serialized_start = 45 + _DATATYPE._serialized_end = 5099 + _DATATYPE_BOOLEAN._serialized_start = 1462 + _DATATYPE_BOOLEAN._serialized_end = 1600 + _DATATYPE_I8._serialized_start = 1603 + _DATATYPE_I8._serialized_end = 1736 + _DATATYPE_I16._serialized_start = 1739 + _DATATYPE_I16._serialized_end = 1873 + _DATATYPE_I32._serialized_start = 1876 + _DATATYPE_I32._serialized_end = 2010 + _DATATYPE_I64._serialized_start = 2013 + _DATATYPE_I64._serialized_end = 2147 + _DATATYPE_FP32._serialized_start = 2150 + _DATATYPE_FP32._serialized_end = 2285 + _DATATYPE_FP64._serialized_start = 2288 + _DATATYPE_FP64._serialized_end = 2423 + _DATATYPE_STRING._serialized_start = 2426 + _DATATYPE_STRING._serialized_end = 2563 + _DATATYPE_BINARY._serialized_start = 2566 + _DATATYPE_BINARY._serialized_end = 2703 + _DATATYPE_TIMESTAMP._serialized_start = 2706 + _DATATYPE_TIMESTAMP._serialized_end = 2846 + _DATATYPE_DATE._serialized_start = 2849 + _DATATYPE_DATE._serialized_end = 2984 + _DATATYPE_TIME._serialized_start = 2987 + _DATATYPE_TIME._serialized_end = 3122 + _DATATYPE_TIMESTAMPTZ._serialized_start = 3125 + _DATATYPE_TIMESTAMPTZ._serialized_end = 3267 + _DATATYPE_INTERVALYEAR._serialized_start = 3270 + _DATATYPE_INTERVALYEAR._serialized_end = 3413 + _DATATYPE_INTERVALDAY._serialized_start = 3416 + _DATATYPE_INTERVALDAY._serialized_end = 3558 + _DATATYPE_UUID._serialized_start = 3561 + _DATATYPE_UUID._serialized_end = 3696 + _DATATYPE_FIXEDCHAR._serialized_start = 3699 + _DATATYPE_FIXEDCHAR._serialized_end = 3863 + _DATATYPE_VARCHAR._serialized_start = 3866 + _DATATYPE_VARCHAR._serialized_end = 4028 + _DATATYPE_FIXEDBINARY._serialized_start = 4031 + _DATATYPE_FIXEDBINARY._serialized_end = 4197 + _DATATYPE_DECIMAL._serialized_start = 4200 + _DATATYPE_DECIMAL._serialized_end = 4390 + 
_DATATYPE_STRUCT._serialized_start = 4393 + _DATATYPE_STRUCT._serialized_end = 4577 + _DATATYPE_LIST._serialized_start = 4580 + _DATATYPE_LIST._serialized_end = 4768 + _DATATYPE_MAP._serialized_start = 4771 + _DATATYPE_MAP._serialized_end = 4995 + _DATATYPE_NULLABILITY._serialized_start = 4997 + _DATATYPE_NULLABILITY._serialized_end = 5091 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/types_pb2.pyi b/python/pyspark/sql/connect/proto/types_pb2.pyi index e7b8bcaeee2..8486053b435 100644 --- a/python/pyspark/sql/connect/proto/types_pb2.pyi +++ b/python/pyspark/sql/connect/proto/types_pb2.pyi @@ -49,8 +49,8 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -class Type(google.protobuf.message.Message): - """This message describes the logical [[Type]] of something. It does not carry the value +class DataType(google.protobuf.message.Message): + """This message describes the logical [[DataType]] of something. It does not carry the value itself but only describes it. """ @@ -61,18 +61,20 @@ class Type(google.protobuf.message.Message): V: typing_extensions.TypeAlias = ValueType class _NullabilityEnumTypeWrapper( - google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type._Nullability.ValueType], + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + DataType._Nullability.ValueType + ], builtins.type, ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor - NULLABILITY_UNSPECIFIED: Type._Nullability.ValueType # 0 - NULLABILITY_NULLABLE: Type._Nullability.ValueType # 1 - NULLABILITY_REQUIRED: Type._Nullability.ValueType # 2 + NULLABILITY_UNSPECIFIED: DataType._Nullability.ValueType # 0 + NULLABILITY_NULLABLE: DataType._Nullability.ValueType # 1 + NULLABILITY_REQUIRED: DataType._Nullability.ValueType # 2 class Nullability(_Nullability, metaclass=_NullabilityEnumTypeWrapper): ... 
- NULLABILITY_UNSPECIFIED: Type.Nullability.ValueType # 0 - NULLABILITY_NULLABLE: Type.Nullability.ValueType # 1 - NULLABILITY_REQUIRED: Type.Nullability.ValueType # 2 + NULLABILITY_UNSPECIFIED: DataType.Nullability.ValueType # 0 + NULLABILITY_NULLABLE: DataType.Nullability.ValueType # 1 + NULLABILITY_REQUIRED: DataType.Nullability.ValueType # 2 class Boolean(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -80,12 +82,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -103,12 +105,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -126,12 +128,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -149,12 +151,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -172,12 +174,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -195,12 +197,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -218,12 +220,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -241,12 +243,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -264,12 +266,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -287,12 +289,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -310,12 +312,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -333,12 +335,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -356,12 +358,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -379,12 +381,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -402,12 +404,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -425,12 +427,12 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -452,13 +454,13 @@ class Type(google.protobuf.message.Message): NULLABILITY_FIELD_NUMBER: builtins.int length: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, length: builtins.int = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... 
def ClearField( self, @@ -480,13 +482,13 @@ class Type(google.protobuf.message.Message): NULLABILITY_FIELD_NUMBER: builtins.int length: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, length: builtins.int = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -508,13 +510,13 @@ class Type(google.protobuf.message.Message): NULLABILITY_FIELD_NUMBER: builtins.int length: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, length: builtins.int = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -538,14 +540,14 @@ class Type(google.protobuf.message.Message): scale: builtins.int precision: builtins.int type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, scale: builtins.int = ..., precision: builtins.int = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -570,15 +572,17 @@ class Type(google.protobuf.message.Message): @property def types( self, - ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Type]: ... + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___DataType + ]: ... 
type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, - types: collections.abc.Iterable[global___Type] | None = ..., + types: collections.abc.Iterable[global___DataType] | None = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def ClearField( self, @@ -595,30 +599,30 @@ class Type(google.protobuf.message.Message): class List(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - TYPE_FIELD_NUMBER: builtins.int + DATATYPE_FIELD_NUMBER: builtins.int TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int @property - def type(self) -> global___Type: ... + def DataType(self) -> global___DataType: ... type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, - type: global___Type | None = ..., + DataType: global___DataType | None = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def HasField( - self, field_name: typing_extensions.Literal["type", b"type"] + self, field_name: typing_extensions.Literal["DataType", b"DataType"] ) -> builtins.bool: ... def ClearField( self, field_name: typing_extensions.Literal[ + "DataType", + b"DataType", "nullability", b"nullability", - "type", - b"type", "type_variation_reference", b"type_variation_reference", ], @@ -632,18 +636,18 @@ class Type(google.protobuf.message.Message): TYPE_VARIATION_REFERENCE_FIELD_NUMBER: builtins.int NULLABILITY_FIELD_NUMBER: builtins.int @property - def key(self) -> global___Type: ... + def key(self) -> global___DataType: ... 
@property - def value(self) -> global___Type: ... + def value(self) -> global___DataType: ... type_variation_reference: builtins.int - nullability: global___Type.Nullability.ValueType + nullability: global___DataType.Nullability.ValueType def __init__( self, *, - key: global___Type | None = ..., - value: global___Type | None = ..., + key: global___DataType | None = ..., + value: global___DataType | None = ..., type_variation_reference: builtins.int = ..., - nullability: global___Type.Nullability.ValueType = ..., + nullability: global___DataType.Nullability.ValueType = ..., ) -> None: ... def HasField( self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] @@ -687,78 +691,78 @@ class Type(google.protobuf.message.Message): MAP_FIELD_NUMBER: builtins.int USER_DEFINED_TYPE_REFERENCE_FIELD_NUMBER: builtins.int @property - def bool(self) -> global___Type.Boolean: ... + def bool(self) -> global___DataType.Boolean: ... @property - def i8(self) -> global___Type.I8: ... + def i8(self) -> global___DataType.I8: ... @property - def i16(self) -> global___Type.I16: ... + def i16(self) -> global___DataType.I16: ... @property - def i32(self) -> global___Type.I32: ... + def i32(self) -> global___DataType.I32: ... @property - def i64(self) -> global___Type.I64: ... + def i64(self) -> global___DataType.I64: ... @property - def fp32(self) -> global___Type.FP32: ... + def fp32(self) -> global___DataType.FP32: ... @property - def fp64(self) -> global___Type.FP64: ... + def fp64(self) -> global___DataType.FP64: ... @property - def string(self) -> global___Type.String: ... + def string(self) -> global___DataType.String: ... @property - def binary(self) -> global___Type.Binary: ... + def binary(self) -> global___DataType.Binary: ... @property - def timestamp(self) -> global___Type.Timestamp: ... + def timestamp(self) -> global___DataType.Timestamp: ... @property - def date(self) -> global___Type.Date: ... + def date(self) -> global___DataType.Date: ... 
@property - def time(self) -> global___Type.Time: ... + def time(self) -> global___DataType.Time: ... @property - def interval_year(self) -> global___Type.IntervalYear: ... + def interval_year(self) -> global___DataType.IntervalYear: ... @property - def interval_day(self) -> global___Type.IntervalDay: ... + def interval_day(self) -> global___DataType.IntervalDay: ... @property - def timestamp_tz(self) -> global___Type.TimestampTZ: ... + def timestamp_tz(self) -> global___DataType.TimestampTZ: ... @property - def uuid(self) -> global___Type.UUID: ... + def uuid(self) -> global___DataType.UUID: ... @property - def fixed_char(self) -> global___Type.FixedChar: ... + def fixed_char(self) -> global___DataType.FixedChar: ... @property - def varchar(self) -> global___Type.VarChar: ... + def varchar(self) -> global___DataType.VarChar: ... @property - def fixed_binary(self) -> global___Type.FixedBinary: ... + def fixed_binary(self) -> global___DataType.FixedBinary: ... @property - def decimal(self) -> global___Type.Decimal: ... + def decimal(self) -> global___DataType.Decimal: ... @property - def struct(self) -> global___Type.Struct: ... + def struct(self) -> global___DataType.Struct: ... @property - def list(self) -> global___Type.List: ... + def list(self) -> global___DataType.List: ... @property - def map(self) -> global___Type.Map: ... + def map(self) -> global___DataType.Map: ... 
user_defined_type_reference: builtins.int def __init__( self, *, - bool: global___Type.Boolean | None = ..., - i8: global___Type.I8 | None = ..., - i16: global___Type.I16 | None = ..., - i32: global___Type.I32 | None = ..., - i64: global___Type.I64 | None = ..., - fp32: global___Type.FP32 | None = ..., - fp64: global___Type.FP64 | None = ..., - string: global___Type.String | None = ..., - binary: global___Type.Binary | None = ..., - timestamp: global___Type.Timestamp | None = ..., - date: global___Type.Date | None = ..., - time: global___Type.Time | None = ..., - interval_year: global___Type.IntervalYear | None = ..., - interval_day: global___Type.IntervalDay | None = ..., - timestamp_tz: global___Type.TimestampTZ | None = ..., - uuid: global___Type.UUID | None = ..., - fixed_char: global___Type.FixedChar | None = ..., - varchar: global___Type.VarChar | None = ..., - fixed_binary: global___Type.FixedBinary | None = ..., - decimal: global___Type.Decimal | None = ..., - struct: global___Type.Struct | None = ..., - list: global___Type.List | None = ..., - map: global___Type.Map | None = ..., + bool: global___DataType.Boolean | None = ..., + i8: global___DataType.I8 | None = ..., + i16: global___DataType.I16 | None = ..., + i32: global___DataType.I32 | None = ..., + i64: global___DataType.I64 | None = ..., + fp32: global___DataType.FP32 | None = ..., + fp64: global___DataType.FP64 | None = ..., + string: global___DataType.String | None = ..., + binary: global___DataType.Binary | None = ..., + timestamp: global___DataType.Timestamp | None = ..., + date: global___DataType.Date | None = ..., + time: global___DataType.Time | None = ..., + interval_year: global___DataType.IntervalYear | None = ..., + interval_day: global___DataType.IntervalDay | None = ..., + timestamp_tz: global___DataType.TimestampTZ | None = ..., + uuid: global___DataType.UUID | None = ..., + fixed_char: global___DataType.FixedChar | None = ..., + varchar: global___DataType.VarChar | None = ..., + 
fixed_binary: global___DataType.FixedBinary | None = ..., + decimal: global___DataType.Decimal | None = ..., + struct: global___DataType.Struct | None = ..., + list: global___DataType.List | None = ..., + map: global___DataType.Map | None = ..., user_defined_type_reference: builtins.int = ..., ) -> None: ... def HasField( @@ -900,4 +904,4 @@ class Type(google.protobuf.message.Message): "user_defined_type_reference", ] | None: ... -global___Type = Type +global___DataType = DataType --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org