justaparth commented on code in PR #40686:
URL: https://github.com/apache/spark/pull/40686#discussion_r1183133402
##########
connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufOptions.scala:
##########
@@ -46,6 +46,41 @@ private[sql] class ProtobufOptions(
// record has more depth than the allowed value for recursive fields, it
will be truncated
// and the corresponding fields will be ignored (dropped).
val recursiveFieldMaxDepth: Int =
parameters.getOrElse("recursive.fields.max.depth", "-1").toInt
+
+ // Whether to render fields with zero values when deserializing protobufs to
a spark struct.
+ // When a field is empty in the serialized protobuf, this library will
deserialize it as
+ // null by default. However, this flag can control whether to render the
type-specific zero value.
+ // This operates similarly to `includingDefaultValues` in Java's JsonFormat,
or `emitDefaults` in
Review Comment:
done!
##########
connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufOptions.scala:
##########
@@ -46,6 +46,41 @@ private[sql] class ProtobufOptions(
// record has more depth than the allowed value for recursive fields, it
will be truncated
// and the corresponding fields will be ignored (dropped).
val recursiveFieldMaxDepth: Int =
parameters.getOrElse("recursive.fields.max.depth", "-1").toInt
+
+ // Whether to render fields with zero values when deserializing protobufs to
a spark struct.
Review Comment:
done
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]