This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new c6136aa60 fix: Fix generate-user-guide-reference-docs failure when mvn command is not executed at root (#2691)
c6136aa60 is described below

commit c6136aa602f769e5666d44b338b8ed1234b70aaf
Author: Manu Zhang <[email protected]>
AuthorDate: Thu Nov 6 21:28:47 2025 +0800

    fix: Fix generate-user-guide-reference-docs failure when mvn command is not executed at root (#2691)
---
 docs/source/user-guide/latest/compatibility.md           |  4 ++--
 spark/pom.xml                                            |  1 +
 spark/src/main/scala/org/apache/comet/GenerateDocs.scala | 15 ++++++---------
 3 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/docs/source/user-guide/latest/compatibility.md b/docs/source/user-guide/latest/compatibility.md
index 908693ff5..7a18e3b4a 100644
--- a/docs/source/user-guide/latest/compatibility.md
+++ b/docs/source/user-guide/latest/compatibility.md
@@ -56,8 +56,8 @@ and sorting on floating-point data can be enabled by setting `spark.comet.expres
 ## Incompatible Expressions
 
 Expressions that are not 100% Spark-compatible will fall back to Spark by default and can be enabled by setting
-`spark.comet.expression.EXPRNAME.allowIncompatible=true`, where `EXPRNAME` is the Spark expression class name. See 
-the [Comet Supported Expressions Guide](expressions.md) for more information on this configuration setting.  
+`spark.comet.expression.EXPRNAME.allowIncompatible=true`, where `EXPRNAME` is the Spark expression class name. See
+the [Comet Supported Expressions Guide](expressions.md) for more information on this configuration setting.
 
 It is also possible to specify `spark.comet.expression.allowIncompatible=true` to enable all
 incompatible expressions.
diff --git a/spark/pom.xml b/spark/pom.xml
index aa3ad27bd..2ff5fcb33 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -326,6 +326,7 @@ under the License.
             </goals>
             <configuration>
               <mainClass>org.apache.comet.GenerateDocs</mainClass>
+              <arguments>${project.parent.basedir}/docs/source/user-guide/latest/</arguments>
               <classpathScope>compile</classpathScope>
             </configuration>
           </execution>
diff --git a/spark/src/main/scala/org/apache/comet/GenerateDocs.scala b/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
index 0fd8b2706..4c2d65e1c 100644
--- a/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
+++ b/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
@@ -36,18 +36,16 @@ import org.apache.comet.serde.{Compatible, Incompatible, QueryPlanSerde}
  */
 object GenerateDocs {
 
-  private def userGuideLocation = "docs/source/user-guide/latest/"
-
-  val publicConfigs: Set[ConfigEntry[_]] = CometConf.allConfs.filter(_.isPublic).toSet
+  private val publicConfigs: Set[ConfigEntry[_]] = CometConf.allConfs.filter(_.isPublic).toSet
 
   def main(args: Array[String]): Unit = {
-    generateConfigReference()
-    generateCompatibilityGuide()
+    val userGuideLocation = args(0)
+    generateConfigReference(s"$userGuideLocation/configs.md")
+    generateCompatibilityGuide(s"$userGuideLocation/compatibility.md")
   }
 
-  private def generateConfigReference(): Unit = {
+  private def generateConfigReference(filename: String): Unit = {
     val pattern = "<!--BEGIN:CONFIG_TABLE\\[(.*)]-->".r
-    val filename = s"$userGuideLocation/configs.md"
     val lines = readFile(filename)
     val w = new BufferedOutputStream(new FileOutputStream(filename))
     for (line <- lines) {
@@ -95,8 +93,7 @@ object GenerateDocs {
     w.close()
   }
 
-  private def generateCompatibilityGuide(): Unit = {
-    val filename = s"$userGuideLocation/compatibility.md"
+  private def generateCompatibilityGuide(filename: String): Unit = {
     val lines = readFile(filename)
     val w = new BufferedOutputStream(new FileOutputStream(filename))
     for (line <- lines) {


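As an aside for readers of the compatibility guide text touched above, here is a minimal
sketch of how the documented settings might be applied in a Spark session. This is not part
of the commit, and the expression class name "Cast" is only an illustrative placeholder:

    import org.apache.spark.sql.SparkSession

    // Hypothetical example: opt in to a single incompatible expression
    // (placeholder class name "Cast"), or to all incompatible expressions at once.
    val spark = SparkSession
      .builder()
      .appName("comet-compat-example")
      .config("spark.comet.expression.Cast.allowIncompatible", "true") // per-expression opt-in
      // .config("spark.comet.expression.allowIncompatible", "true")   // all incompatible expressions
      .getOrCreate()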
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
