This is an automated email from the ASF dual-hosted git repository.
yikun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark-docker.git
The following commit(s) were added to refs/heads/master by this push:
new 7f9b414 [SPARK-43372] Use ; instead of && when set -ex is enabled
7f9b414 is described below
commit 7f9b414de48639d69c64acfd81e6792517b86f61
Author: Yikun Jiang <[email protected]>
AuthorDate: Mon May 8 11:19:36 2023 +0800
[SPARK-43372] Use ; instead of && when set -ex is enabled
### What changes were proposed in this pull request?
- Use `;` instead of `&&` in RUN instructions where `set -ex` is enabled
- Run `./add-dockerfiles.sh 3.4.0` to apply the change to the generated 3.4.0 Dockerfiles
### Why are the changes needed?
This addresses the Docker Official Images (DOI) review comment: `9. using set -ex
means you can use ; instead of && (really only matters for complex expressions,
like the || in the later RUN that does use ;)`
https://github.com/docker-library/official-images/pull/13089#issuecomment-1533540388
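For context (not part of the patch itself), a minimal shell sketch of why `;` is enough once `set -e` is active: the shell aborts on the first failing command either way, so `;` keeps the same fail-fast behavior as `&&` while leaving lines that contain `||` easier to write.

```sh
# Without set -e, the failure of `false` is ignored and "reached" is printed.
sh -c 'false; echo reached'

# With set -e, the shell exits at `false`: nothing is printed and the exit
# status is non-zero -- the same outcome as chaining the commands with &&.
sh -c 'set -e; false; echo reached'
```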
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
CI passed
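Beyond CI, a possible local spot check (not how this patch was verified; the path is one of the directories touched above) would be to build one of the regenerated images and confirm the RUN steps still succeed:

```sh
# Build a regenerated 3.4.0 Dockerfile; the tag name here is only illustrative.
docker build -t spark:3.4.0-scala2.12-java11-ubuntu-test 3.4.0/scala2.12-java11-ubuntu/
```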
Closes #38 from Yikun/SPARK-43372.
Authored-by: Yikun Jiang <[email protected]>
Signed-off-by: Yikun Jiang <[email protected]>
---
3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile | 10 +++----
3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile | 8 +++---
3.4.0/scala2.12-java11-r-ubuntu/Dockerfile | 8 +++---
3.4.0/scala2.12-java11-ubuntu/Dockerfile | 32 +++++++++++-----------
Dockerfile.template | 32 +++++++++++-----------
r-python.template | 10 +++----
6 files changed, 50 insertions(+), 50 deletions(-)
diff --git a/3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile b/3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile
index 86337c5..12c7a4f 100644
--- a/3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile
+++ b/3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile
@@ -17,11 +17,11 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE
-RUN set -ex && \
- apt-get update && \
- apt install -y python3 python3-pip && \
- apt install -y r-base r-base-dev && \
- rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+ apt-get update; \
+ apt install -y python3 python3-pip; \
+ apt install -y r-base r-base-dev; \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
ENV R_HOME /usr/lib/R
diff --git a/3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile b/3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile
index 540805f..1f0dd1f 100644
--- a/3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile
+++ b/3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile
@@ -17,8 +17,8 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE
-RUN set -ex && \
- apt-get update && \
- apt install -y python3 python3-pip && \
- rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+ apt-get update; \
+ apt install -y python3 python3-pip; \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
diff --git a/3.4.0/scala2.12-java11-r-ubuntu/Dockerfile b/3.4.0/scala2.12-java11-r-ubuntu/Dockerfile
index c65c2ce..53647b2 100644
--- a/3.4.0/scala2.12-java11-r-ubuntu/Dockerfile
+++ b/3.4.0/scala2.12-java11-r-ubuntu/Dockerfile
@@ -17,10 +17,10 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE
-RUN set -ex && \
- apt-get update && \
- apt install -y r-base r-base-dev && \
- rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+ apt-get update; \
+ apt install -y r-base r-base-dev; \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
ENV R_HOME /usr/lib/R
diff --git a/3.4.0/scala2.12-java11-ubuntu/Dockerfile b/3.4.0/scala2.12-java11-ubuntu/Dockerfile
index 21d95d4..11f997f 100644
--- a/3.4.0/scala2.12-java11-ubuntu/Dockerfile
+++ b/3.4.0/scala2.12-java11-ubuntu/Dockerfile
@@ -21,22 +21,22 @@ ARG spark_uid=185
RUN groupadd --system --gid=${spark_uid} spark && \
useradd --system --uid=${spark_uid} --gid=spark spark
-RUN set -ex && \
- apt-get update && \
- ln -s /lib /lib64 && \
- apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
- mkdir -p /opt/spark && \
- mkdir /opt/spark/python && \
- mkdir -p /opt/spark/examples && \
- mkdir -p /opt/spark/work-dir && \
- chmod g+w /opt/spark/work-dir && \
- touch /opt/spark/RELEASE && \
- chown -R spark:spark /opt/spark && \
- rm /bin/sh && \
- ln -sv /bin/bash /bin/sh && \
- echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
- chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
- rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+ apt-get update; \
+ ln -s /lib /lib64; \
+ apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu; \
+ mkdir -p /opt/spark; \
+ mkdir /opt/spark/python; \
+ mkdir -p /opt/spark/examples; \
+ mkdir -p /opt/spark/work-dir; \
+ chmod g+w /opt/spark/work-dir; \
+ touch /opt/spark/RELEASE; \
+ chown -R spark:spark /opt/spark; \
+ rm /bin/sh; \
+ ln -sv /bin/bash /bin/sh; \
+ echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
+ chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
# Install Apache Spark
diff --git a/Dockerfile.template b/Dockerfile.template
index db01a87..6e85cd3 100644
--- a/Dockerfile.template
+++ b/Dockerfile.template
@@ -21,22 +21,22 @@ ARG spark_uid=185
RUN groupadd --system --gid=${spark_uid} spark && \
useradd --system --uid=${spark_uid} --gid=spark spark
-RUN set -ex && \
- apt-get update && \
- ln -s /lib /lib64 && \
- apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
- mkdir -p /opt/spark && \
- mkdir /opt/spark/python && \
- mkdir -p /opt/spark/examples && \
- mkdir -p /opt/spark/work-dir && \
- chmod g+w /opt/spark/work-dir && \
- touch /opt/spark/RELEASE && \
- chown -R spark:spark /opt/spark && \
- rm /bin/sh && \
- ln -sv /bin/bash /bin/sh && \
- echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
- chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
- rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+ apt-get update; \
+ ln -s /lib /lib64; \
+ apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu; \
+ mkdir -p /opt/spark; \
+ mkdir /opt/spark/python; \
+ mkdir -p /opt/spark/examples; \
+ mkdir -p /opt/spark/work-dir; \
+ chmod g+w /opt/spark/work-dir; \
+ touch /opt/spark/RELEASE; \
+ chown -R spark:spark /opt/spark; \
+ rm /bin/sh; \
+ ln -sv /bin/bash /bin/sh; \
+ echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
+ chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
# Install Apache Spark
diff --git a/r-python.template b/r-python.template
index fec4e70..1c3087e 100644
--- a/r-python.template
+++ b/r-python.template
@@ -17,15 +17,15 @@
ARG BASE_IMAGE=spark:{{ SPARK_VERSION }}-scala{{ SCALA_VERSION }}-java{{ JAVA_VERSION }}-ubuntu
FROM $BASE_IMAGE
-RUN set -ex && \
- apt-get update && \
+RUN set -ex; \
+ apt-get update; \
{%- if HAVE_PY %}
- apt install -y python3 python3-pip && \
+ apt install -y python3 python3-pip; \
{%- endif %}
{%- if HAVE_R %}
- apt install -y r-base r-base-dev && \
+ apt install -y r-base r-base-dev; \
{%- endif %}
- rm -rf /var/cache/apt/* && \
+ rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
{%- if HAVE_R %}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]