This is an automated email from the ASF dual-hosted git repository.

xushiyan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 7f72c2b07ab5 fix: correct indentation in utils.py and add docker compose validation (#14168)
7f72c2b07ab5 is described below

commit 7f72c2b07ab5b77a3e10146838a8d7a86f0306db
Author: deepakpanda93 <[email protected]>
AuthorDate: Tue Oct 28 20:41:43 2025 +0530

    fix: correct indentation in utils.py and add docker compose validation (#14168)
---
 hudi-notebooks/Dockerfile.hive    |  3 ---
 hudi-notebooks/notebooks/utils.py |  2 +-
 hudi-notebooks/run_spark_hudi.sh  | 25 +++++++++++++++++++++----
 3 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/hudi-notebooks/Dockerfile.hive b/hudi-notebooks/Dockerfile.hive
index 95cb0f863471..4db708947447 100644
--- a/hudi-notebooks/Dockerfile.hive
+++ b/hudi-notebooks/Dockerfile.hive
@@ -20,8 +20,6 @@ FROM apache/hive:$HIVE_VERSION
 
 USER root
 
-ARG POSTGRES_JDBC_VERSION=${POSTGRES_JDBC_VERSION:-42.7.3}
-
 RUN apt-get update && apt-get -y install wget vim && \
     apt-get clean && rm -rf /var/lib/apt/lists/*
 
@@ -30,5 +28,4 @@ COPY conf/hive/metastore-site.xml $HIVE_HOME/conf/metastore-site.xml
 RUN mkdir -p $HIVE_HOME/.beeline && \
     ln -s $HADOOP_HOME/share/hadoop/tools/lib/hadoop-aws-3.1.0.jar $HIVE_HOME/lib/hadoop-aws.jar && \
     ln -s $HADOOP_HOME/share/hadoop/tools/lib/aws-java-sdk-bundle-1.11.271.jar $HIVE_HOME/lib/aws-java-sdk.jar && \
-    wget -O $HIVE_HOME/lib/postgresql-$POSTGRES_JDBC_VERSION.jar https://jdbc.postgresql.org/download/postgresql-$POSTGRES_JDBC_VERSION.jar && \
     sed -i '/<name>hive.execution.engine<\/name>/{n;s|<value>.*</value>|<value>mr</value>|}' "$HIVE_HOME/conf/hive-site.xml"
\ No newline at end of file
diff --git a/hudi-notebooks/notebooks/utils.py b/hudi-notebooks/notebooks/utils.py
index 1a888ffe3ab0..007761ebe53d 100644
--- a/hudi-notebooks/notebooks/utils.py
+++ b/hudi-notebooks/notebooks/utils.py
@@ -54,7 +54,7 @@ def ls(base_path):
     if not base_path.startswith("s3a://"):
         raise ValueError("Path must start with 's3a://'")
     try:
-           hadoop_conf = spark._jsc.hadoopConfiguration()
+        hadoop_conf = spark._jsc.hadoopConfiguration()
         fs = spark._jvm.org.apache.hadoop.fs.FileSystem.get(hadoop_conf)
         p = spark._jvm.org.apache.hadoop.fs.Path(base_path)
         if not fs.exists(p):
diff --git a/hudi-notebooks/run_spark_hudi.sh b/hudi-notebooks/run_spark_hudi.sh
index 330a6659515f..4cda555a71a5 100644
--- a/hudi-notebooks/run_spark_hudi.sh
+++ b/hudi-notebooks/run_spark_hudi.sh
@@ -18,16 +18,33 @@
 state=${1:-"start"}
 state=$(echo "$state" | tr '[:upper:]' '[:lower:]')
 
+# ----------------------------------------------------------
+# Function to determine which docker compose command to use
+# ----------------------------------------------------------
+get_docker_compose_cmd() {
+    if docker compose version &>/dev/null; then
+        echo "docker compose"
+    elif docker-compose version &>/dev/null; then
+        echo "docker-compose"
+    else
+        echo "ERROR: Neither 'docker compose' nor 'docker-compose' is installed or available in PATH." >&2
+        exit 1
+    fi
+}
+
+# Detect and assign the correct compose command
+DOCKER_COMPOSE_CMD=$(get_docker_compose_cmd)
+
 case "$state" in
   start)
-    docker-compose up -d
+    $DOCKER_COMPOSE_CMD up -d
     ;;
   stop)
-    docker-compose down
+    $DOCKER_COMPOSE_CMD down
     ;;
   restart)
-    docker-compose down
-    docker-compose up -d --build
+    $DOCKER_COMPOSE_CMD down
+    $DOCKER_COMPOSE_CMD up -d --build
     ;;
   *)
     echo "Usage: $0 {start|stop|restart}"

Reply via email to