zhenlineo commented on code in PR #39866:
URL: https://github.com/apache/spark/pull/39866#discussion_r1095939209
##########
project/SparkBuild.scala:
##########
@@ -862,21 +862,27 @@ object SparkConnectClient {
     (assembly / assemblyPackageScala / assembleArtifact) := false,
     // Exclude `pmml-model-*.jar`, `scala-collection-compat_*.jar`,`jsr305-*.jar` and
-    // `netty-*.jar` and `unused-1.0.0.jar` from assembly.
+    // `unused-1.0.0.jar` and checker-qual from assembly.
     (assembly / assemblyExcludedJars) := {
       val cp = (assembly / fullClasspath).value
       cp filter { v =>
         val name = v.data.getName
         name.startsWith("pmml-model-") ||
           name.startsWith("scala-collection-compat_") ||
-          name.startsWith("jsr305-") || name.startsWith("netty-") || name == "unused-1.0.0.jar"
+          name.startsWith("jsr305-") || name == "unused-1.0.0.jar" ||
+          name.startsWith("checker-qual")
       }
     },
     (assembly / assemblyShadeRules) := Seq(
-      ShadeRule.rename("io.grpc.**" -> "org.sparkproject.connect.client.grpc.@0").inAll,
-      ShadeRule.rename("com.google.protobuf.**" -> "org.sparkproject.connect.protobuf.@1").inAll,
-      ShadeRule.rename("com.google.common.**" -> "org.sparkproject.connect.client.guava.@1").inAll,
-      ShadeRule.rename("com.google.thirdparty.**" -> "org.sparkproject.connect.client.guava.@1").inAll,
+      ShadeRule.rename("io.grpc.**" -> "org.sparkproject.connect.client.io.grpc.@1").inAll,
+      ShadeRule.rename("com.google.**" -> "org.sparkproject.connect.client.com.google.@1").inAll,
+      ShadeRule.rename("io.netty.**" -> "org.sparkproject.connect.client.io.netty.@1").inAll,
+      ShadeRule.rename("org.checkerframework.**" -> "org.sparkproject.connect.client.org.checkerframework.@1").inAll,
+      ShadeRule.rename("javax.annotation.**" -> "org.sparkproject.connect.client.javax.annotation.@1").inAll,
Review Comment:
Yeah, currently the sbt and maven results are not exactly the same. SBT seems
to have skipped all runtime dependencies; I need to look into it a bit more. I
assume you got the SBT result, since all of the above are "runtime"
dependencies.
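A quick way to chase down the sbt/maven mismatch mentioned above is to diff the
entry lists of the two assembly jars. The sketch below is only illustrative and
not part of the PR; the object name and the convention of passing the two jar
paths as program arguments are made up for the example.

    // Compares the contents of two assembly jars, e.g. the sbt-built and
    // maven-built spark-connect-client-jvm jars (paths passed as arguments).
    import java.util.jar.JarFile
    import scala.jdk.CollectionConverters._

    object ShadedJarDiff extends App {
      // All entry names (classes and resources) inside one jar.
      def entries(path: String): Set[String] =
        new JarFile(path).entries().asScala.map(_.getName).toSet

      val Array(sbtJar, mvnJar) = args
      (entries(mvnJar) -- entries(sbtJar)).toSeq.sorted.foreach(e => println(s"only in maven: $e"))
      (entries(sbtJar) -- entries(mvnJar)).toSeq.sorted.foreach(e => println(s"only in sbt: $e"))
    }

Entries that appear only in the maven jar would point at the runtime-scoped
dependencies that the sbt assembly appears to be skipping.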
##########
connector/connect/client/jvm/pom.xml:
##########
@@ -104,39 +106,70 @@
          <shadedArtifactAttached>false</shadedArtifactAttached>
          <artifactSet>
            <includes>
-             <include>com.google.guava:*</include>
-             <include>io.grpc:*</include>
-             <include>com.google.protobuf:*</include>
              <include>org.apache.spark:spark-connect-common_${scala.binary.version}</include>
+             <include>com.google.android:*</include>
+             <include>com.google.api.grpc:*</include>
+             <include>com.google.code.findbugs:*</include>
+             <include>com.google.code.gson:*</include>
+             <include>com.google.errorprone:*</include>
+             <include>com.google.j2objc:*</include>
+             <include>com.google.protobuf:*</include>
+             <include>io.grpc:*</include>
+             <include>io.netty:*</include>
+             <include>io.perfmark:*</include>
+             <include>org.codehaus.mojo:*</include>
+             <include>org.objenesis:*</include>
            </includes>
          </artifactSet>
          <relocations>
            <relocation>
              <pattern>io.grpc</pattern>
-             <shadedPattern>${spark.shade.packageName}.connect.client.grpc</shadedPattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.io.grpc</shadedPattern>
              <includes>
                <include>io.grpc.**</include>
              </includes>
            </relocation>
            <relocation>
-             <pattern>com.google.protobuf</pattern>
-             <shadedPattern>${spark.shade.packageName}.connect.protobuf</shadedPattern>
-             <includes>
-               <include>com.google.protobuf.**</include>
-             </includes>
+             <pattern>com.google</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.com.google</shadedPattern>
            </relocation>
+           <relocation>
+             <pattern>io.netty</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.io.netty</shadedPattern>
+           </relocation>
+           <relocation>
+             <pattern>org.checkerframework</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.checkerframework</shadedPattern>
+           </relocation>
+           <relocation>
+             <pattern>javax.annotation</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.javax.annotation</shadedPattern>
+           </relocation>
+           <relocation>
+             <pattern>io.perfmark</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.io.perfmark</shadedPattern>
+           </relocation>
+           <relocation>
+             <pattern>org.codehaus</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.org.codehaus</shadedPattern>
+           </relocation>
+           <relocation>
+             <pattern>android.annotation</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.android.annotation</shadedPattern>
            </relocation>
+           <!-- Relocate the protos from spark-common into a client only package -->
            <relocation>
-             <pattern>com.google.common</pattern>
-             <shadedPattern>${spark.shade.packageName}.connect.client.guava</shadedPattern>
+             <pattern>org.apache.spark.connect.proto</pattern>
+             <shadedPattern>${spark.shade.packageName}.connect.client.proto</shadedPattern>
              <includes>
-               <include>com.google.common.**</include>
+               <include>org.apache.spark.connect.proto.**</include>
              </includes>
            </relocation>
            <relocation>
-             <pattern>com.google.thirdparty</pattern>
-             <shadedPattern>${spark.shade.packageName}.connect.client.guava</shadedPattern>
+             <pattern>org.apache.spark.sql.connect.common</pattern>
Review Comment:
It allows the client and server to run in the same JVM, e.g. a 3.4 client and a
4.0 server in the same JVM. The extra work was small, so I included it anyway.
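For background on why this relocation enables the mixed-version setup: once the
client's copy of spark-connect-common is shaded into its own package, the JVM
sees it as a set of classes unrelated to the server's originals, so the two
versions never collide on one classpath. A minimal sketch of the effect,
assuming the relocated copy lands under the org.sparkproject.connect.client
prefix used by the other relocations (the exact shadedPattern is cut off in the
diff above) and using a placeholder class name:

    // Both class names below are assumptions for illustration; substitute a
    // real class from spark-connect-common.
    object RelocationCheck extends App {
      // The server's original class.
      val serverSide = Class.forName("org.apache.spark.sql.connect.common.SomeCommonClass")
      // The client's relocated copy of the same source class.
      val clientSide = Class.forName("org.sparkproject.connect.client.org.apache.spark.sql.connect.common.SomeCommonClass")
      // Two distinct runtime classes: a 3.4 client and a 4.0 server can each
      // use their own version without conflict.
      assert(serverSide != clientSide)
    }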
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.