This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 35187995bf9 [SPARK-44530][CORE][CONNECT] Move SparkBuildInfo to common/util
35187995bf9 is described below

commit 35187995bf9eaaa57fd29b714a304355c3049c39
Author: Herman van Hovell <her...@databricks.com>
AuthorDate: Tue Jul 25 20:51:27 2023 -0400

    [SPARK-44530][CORE][CONNECT] Move SparkBuildInfo to common/util
    
    ### What changes were proposed in this pull request?
    This PR moves `SparkBuildInfo` and the code that generates its properties to `common/util`.
    
    ### Why are the changes needed?
    We need `SparkBuildInfo` in the connect Scala client, and we are removing connect's dependency on `core`.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing tests.
    
    Closes #42133 from hvanhovell/SPARK-44530.
    
    Authored-by: Herman van Hovell <her...@databricks.com>
    Signed-off-by: Herman van Hovell <her...@databricks.com>
---
 common/utils/pom.xml                               | 62 ++++++++++++++++++++
 .../scala/org/apache/spark/SparkBuildInfo.scala    | 65 +++++++++++++++++++++
 core/pom.xml                                       | 62 --------------------
 core/src/main/scala/org/apache/spark/package.scala | 66 +++-------------------
 project/SparkBuild.scala                           | 39 +++++++------
 5 files changed, 158 insertions(+), 136 deletions(-)

diff --git a/common/utils/pom.xml b/common/utils/pom.xml
index 20abad21243..2f2fee0cf41 100644
--- a/common/utils/pom.xml
+++ b/common/utils/pom.xml
@@ -84,7 +84,69 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    <resources>
+      <resource>
+        <directory>${project.basedir}/src/main/resources</directory>
+      </resource>
+      <resource>
+        <!-- Include the properties file to provide the build information. -->
+        <directory>${project.build.directory}/extra-resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>choose-shell-and-script</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <exportAntProperties>true</exportAntProperties>
+              <target>
+                <condition property="shell" value="powershell.exe" else="bash">
+                  <and>
+                    <os family="windows"/>
+                  </and>
+                </condition>
+                <condition property="spark-build-info-script" 
value="spark-build-info.ps1"
+                           else="spark-build-info">
+                  <and>
+                    <os family="windows"/>
+                  </and>
+                </condition>
+                <echo>Shell to use for generating spark-version-info.properties file =
+                  ${shell}
+                </echo>
+                <echo>Script to use for generating spark-version-info.properties file =
+                  ${spark-build-info-script}
+                </echo>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>generate-spark-build-info</id>
+            <phase>generate-resources</phase>
+            <configuration>
+              <!-- Execute the shell script to generate the spark build information. -->
+              <target>
+                <exec executable="${shell}">
+                  <arg value="${project.basedir}/../../build/${spark-build-info-script}"/>
+                  <arg value="${project.build.directory}/extra-resources"/>
+                  <arg value="${project.version}"/>
+                </exec>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>
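
The two antrun executions above first pick a shell based on the OS family
(powershell.exe on Windows, bash elsewhere) and then run the matching
build/spark-build-info script, which writes spark-version-info.properties
into target/extra-resources; the <resources> block filters that file onto
the classpath. As a minimal sketch (not part of this patch; the object name
is hypothetical), the generated resource can be verified at runtime:

    import java.util.Properties

    object CheckBuildInfoResource {
      def main(args: Array[String]): Unit = {
        // The antrun step above places spark-version-info.properties on the classpath.
        val in = Thread.currentThread().getContextClassLoader
          .getResourceAsStream("spark-version-info.properties")
        require(in != null, "spark-version-info.properties is missing from the classpath")
        val props = new Properties()
        try props.load(in) finally in.close()
        // SparkBuildInfo (below) reads: version, branch, revision, user, url, date, docroot.
        println(s"Built Spark ${props.getProperty("version", "<unknown>")}")
      }
    }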
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
new file mode 100644
index 00000000000..23f671f9d76
--- /dev/null
+++ b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark
+
+import java.util.Properties
+
+object SparkBuildInfo {
+
+  val (
+    spark_version: String,
+    spark_branch: String,
+    spark_revision: String,
+    spark_build_user: String,
+    spark_repo_url: String,
+    spark_build_date: String,
+    spark_doc_root: String) = {
+
+    val resourceStream = Thread.currentThread().getContextClassLoader.
+      getResourceAsStream("spark-version-info.properties")
+    if (resourceStream == null) {
+      throw new SparkException("Could not find spark-version-info.properties")
+    }
+
+    try {
+      val unknownProp = "<unknown>"
+      val props = new Properties()
+      props.load(resourceStream)
+      (
+        props.getProperty("version", unknownProp),
+        props.getProperty("branch", unknownProp),
+        props.getProperty("revision", unknownProp),
+        props.getProperty("user", unknownProp),
+        props.getProperty("url", unknownProp),
+        props.getProperty("date", unknownProp),
+        props.getProperty("docroot", unknownProp)
+      )
+    } catch {
+      case e: Exception =>
+        throw new SparkException("Error loading properties from 
spark-version-info.properties", e)
+    } finally {
+      if (resourceStream != null) {
+        try {
+          resourceStream.close()
+        } catch {
+          case e: Exception =>
+            throw new SparkException("Error closing spark build info resource 
stream", e)
+        }
+      }
+    }
+  }
+}
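
With the object now living in common/utils, any module that depends on that
artifact (such as the connect Scala client) can read build metadata without
pulling in core. A hypothetical usage sketch:

    import org.apache.spark.SparkBuildInfo

    object PrintBuildInfo {
      def main(args: Array[String]): Unit = {
        // Every field falls back to "<unknown>" when a property is absent.
        println(s"Spark ${SparkBuildInfo.spark_version} " +
          s"(revision ${SparkBuildInfo.spark_revision}, built ${SparkBuildInfo.spark_build_date})")
      }
    }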
diff --git a/core/pom.xml b/core/pom.xml
index 6e3552c90c3..1d552a65f7b 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -513,69 +513,7 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <resources>
-      <resource>
-        <directory>${project.basedir}/src/main/resources</directory>
-      </resource>
-      <resource>
-        <!-- Include the properties file to provide the build information. -->
-        <directory>${project.build.directory}/extra-resources</directory>
-        <filtering>true</filtering>
-      </resource>
-    </resources>
     <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>choose-shell-and-script</id>
-            <phase>validate</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <exportAntProperties>true</exportAntProperties>
-              <target>
-                <condition property="shell" value="powershell.exe" else="bash">
-                  <and>
-                    <os family="windows"/>
-                  </and>
-                </condition>
-                <condition property="spark-build-info-script" 
value="spark-build-info.ps1"
-                           else="spark-build-info">
-                  <and>
-                    <os family="windows"/>
-                  </and>
-                </condition>
-                <echo>Shell to use for generating spark-version-info.properties file =
-                  ${shell}
-                </echo>
-                <echo>Script to use for generating spark-version-info.properties file =
-                  ${spark-build-info-script}
-                </echo>
-              </target>
-            </configuration>
-          </execution>
-          <execution>
-            <id>generate-spark-build-info</id>
-            <phase>generate-resources</phase>
-            <configuration>
-              <!-- Execute the shell script to generate the spark build information. -->
-              <target>
-                <exec executable="${shell}">
-                  <arg value="${project.basedir}/../build/${spark-build-info-script}"/>
-                  <arg value="${project.build.directory}/extra-resources"/>
-                  <arg value="${project.version}"/>
-                </exec>
-              </target>
-            </configuration>
-            <goals>
-              <goal>run</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index 92cab14294f..5b512fbcda8 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -17,8 +17,6 @@
 
 package org.apache
 
-import java.util.Properties
-
 import org.apache.spark.util.VersionUtils
 
 /**
@@ -45,60 +43,12 @@ import org.apache.spark.util.VersionUtils
  * level interfaces. These are subject to changes or removal in minor releases.
  */
 package object spark {
-
-  private object SparkBuildInfo {
-
-    val (
-        spark_version: String,
-        spark_branch: String,
-        spark_revision: String,
-        spark_build_user: String,
-        spark_repo_url: String,
-        spark_build_date: String,
-        spark_doc_root: String) = {
-
-      val resourceStream = Thread.currentThread().getContextClassLoader.
-        getResourceAsStream("spark-version-info.properties")
-      if (resourceStream == null) {
-        throw new SparkException("Could not find 
spark-version-info.properties")
-      }
-
-      try {
-        val unknownProp = "<unknown>"
-        val props = new Properties()
-        props.load(resourceStream)
-        (
-          props.getProperty("version", unknownProp),
-          props.getProperty("branch", unknownProp),
-          props.getProperty("revision", unknownProp),
-          props.getProperty("user", unknownProp),
-          props.getProperty("url", unknownProp),
-          props.getProperty("date", unknownProp),
-          props.getProperty("docroot", unknownProp)
-        )
-      } catch {
-        case e: Exception =>
-          throw new SparkException("Error loading properties from 
spark-version-info.properties", e)
-      } finally {
-        if (resourceStream != null) {
-          try {
-            resourceStream.close()
-          } catch {
-            case e: Exception =>
-              throw new SparkException("Error closing spark build info 
resource stream", e)
-          }
-        }
-      }
-    }
-  }
-
-  val SPARK_VERSION = SparkBuildInfo.spark_version
-  val SPARK_VERSION_SHORT = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
-  val SPARK_BRANCH = SparkBuildInfo.spark_branch
-  val SPARK_REVISION = SparkBuildInfo.spark_revision
-  val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
-  val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
-  val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
-  val SPARK_DOC_ROOT = SparkBuildInfo.spark_doc_root
+  val SPARK_VERSION: String = SparkBuildInfo.spark_version
+  val SPARK_VERSION_SHORT: String = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
+  val SPARK_BRANCH: String = SparkBuildInfo.spark_branch
+  val SPARK_REVISION: String = SparkBuildInfo.spark_revision
+  val SPARK_BUILD_USER: String = SparkBuildInfo.spark_build_user
+  val SPARK_REPO_URL: String = SparkBuildInfo.spark_repo_url
+  val SPARK_BUILD_DATE: String = SparkBuildInfo.spark_build_date
+  val SPARK_DOC_ROOT: String = SparkBuildInfo.spark_doc_root
 }
-
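
Call sites of the package-level constants are unaffected: the package object
keeps exposing the same names and now simply delegates to the shared
SparkBuildInfo object. An illustrative check (the object name is made up):

    import org.apache.spark.{SPARK_VERSION, SPARK_VERSION_SHORT}

    object VersionCheck {
      // Both constants now come from common/utils' SparkBuildInfo.
      def main(args: Array[String]): Unit =
        println(s"$SPARK_VERSION (short: $SPARK_VERSION_SHORT)")
    }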
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index cc27686b6b3..e585d5dd2b2 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -425,6 +425,8 @@ object SparkBuild extends PomBuild {
   }
 
   /* Generate and pick the spark build info from extra-resources */
+  enable(CommonUtils.settings)(commonUtils)
+
   enable(Core.settings)(core)
 
   /* Unsafe settings */
@@ -626,27 +628,13 @@ object SparkParallelTestGrouping {
   )
 }
 
-object Core {
+object CommonUtils {
   import scala.sys.process.Process
-  import BuildCommons.protoVersion
   def buildenv = Process(Seq("uname")).!!.trim.replaceFirst("[^A-Za-z0-9].*", "").toLowerCase
   def bashpath = Process(Seq("where", "bash")).!!.split("[\r\n]+").head.replace('\\', '/')
   lazy val settings = Seq(
-    // Setting version for the protobuf compiler. This has to be propagated to every sub-project
-    // even if the project is not using it.
-    PB.protocVersion := BuildCommons.protoVersion,
-    // For some reason the resolution from the imported Maven build does not work for some
-    // of these dependencies that we need to shade later on.
-    libraryDependencies ++= {
-      Seq(
-        "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
-      )
-    },
-    (Compile / PB.targets) := Seq(
-      PB.gens.java -> (Compile / sourceManaged).value
-    ),
     (Compile / resourceGenerators) += Def.task {
-      val buildScript = baseDirectory.value + "/../build/spark-build-info"
+      val buildScript = baseDirectory.value + "/../../build/spark-build-info"
       val targetDir = baseDirectory.value + "/target/extra-resources/"
       // support Windows build under cygwin/mingw64, etc
       val bash = buildenv match {
@@ -658,6 +646,25 @@ object Core {
       val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
       Seq(propsFile)
     }.taskValue
+  )
+}
+
+object Core {
+  import BuildCommons.protoVersion
+  lazy val settings = Seq(
+    // Setting version for the protobuf compiler. This has to be propagated to every sub-project
+    // even if the project is not using it.
+    PB.protocVersion := BuildCommons.protoVersion,
+    // For some reason the resolution from the imported Maven build does not work for some
+    // of these dependencies that we need to shade later on.
+    libraryDependencies ++= {
+      Seq(
+        "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
+      )
+    },
+    (Compile / PB.targets) := Seq(
+      PB.gens.java -> (Compile / sourceManaged).value
+    )
   ) ++ {
     val sparkProtocExecPath = sys.props.get("spark.protoc.executable.path")
     if (sparkProtocExecPath.isDefined) {
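
On the sbt side, the resource-generator task moves out of Core into the new
CommonUtils object and is enabled on the commonUtils project; note the script
path gains one more "../" because common/utils sits one level deeper than
core. A minimal, self-contained build.sbt sketch of the same pattern, with
illustrative names not taken from the patch (POSIX shells only):

    // Run a script at build time and register its output file as a managed
    // resource, mirroring CommonUtils.settings above.
    Compile / resourceGenerators += Def.task {
      import scala.sys.process._
      val script = baseDirectory.value / ".." / ".." / "build" / "spark-build-info"
      val outDir = baseDirectory.value / "target" / "extra-resources"
      Process(Seq("bash", script.getPath, outDir.getPath, version.value)).!!
      Seq(outDir / "spark-version-info.properties")
    }.taskValue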


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
