dianfu commented on a change in pull request #15813:
URL: https://github.com/apache/flink/pull/15813#discussion_r629276435
##########
File path:
flink-python/src/main/java/org/apache/flink/python/env/beam/ProcessPythonEnvironmentManager.java
##########
@@ -326,9 +327,21 @@ private void constructArchivesDirectory(Map<String,
String> env) throws IOExcept
// extract archives to archives directory
for (Map.Entry<String, String> entry :
dependencyInfo.getArchives().entrySet()) {
- ZipUtils.extractZipFileWithPermissions(
- entry.getKey(),
- String.join(File.separator, archivesDirectory,
entry.getValue()));
+ String inFilePath = entry.getKey();
+ String targetDirPath =
+ String.join(File.separator, archivesDirectory,
entry.getValue());
+ if (inFilePath.endsWith(".zip") ||
inFilePath.endsWith(".jar")) {
Review comment:
What about converting inFilePath to lower case and then checking the suffix?
##########
File path: flink-python/src/main/java/org/apache/flink/python/util/TarUtils.java
##########
@@ -0,0 +1,98 @@
+package org.apache.flink.python.util;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.util.IOUtils;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/** Utils used to extract tar files. */
+@Internal
+public class TarUtils {
+ public static void unTar(String inFilePath, String targetDirPath)
+ throws IOException, InterruptedException {
+ File targetDir = new File(targetDirPath);
+ if (!targetDir.mkdirs()) {
+ if (!targetDir.isDirectory()) {
+ throw new IOException("Mkdirs failed to create " + targetDir);
+ }
+ }
+ boolean gzipped = inFilePath.endsWith("gz");
+ if (DecompressUtils.isUnix()) {
+ unTarUsingTar(inFilePath, targetDirPath, gzipped);
+ } else {
+ unTarUsingJava(inFilePath, targetDirPath, gzipped);
+ }
+ }
+
+ // Copy and simplify from hadoop-common package that is used in YARN
+ // See
https://github.com/apache/hadoop/blob/7f93349ee74da5f35276b7535781714501ab2457/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+ private static void unTarUsingTar(String inFilePath, String targetDirPath,
boolean gzipped)
+ throws IOException, InterruptedException {
+ inFilePath = makeSecureShellPath(inFilePath);
+ targetDirPath = makeSecureShellPath(targetDirPath);
+ String untarCommand =
+ gzipped
+ ? String.format(
+ "gzip -dc '%s' | (cd '%s' && tar -xf -)",
inFilePath, targetDirPath)
+ : String.format("cd '%s' && tar -xf '%s'",
targetDirPath, inFilePath);
+ Process process = new ProcessBuilder("bash", "-c",
untarCommand).start();
+ int exitCode = process.waitFor();
+ if (exitCode != 0) {
+ throw new IOException(
+ "Error untarring file "
+ + inFilePath
+ + ". Tar process exited with exit code "
+ + exitCode);
+ }
+ }
+
+ // Follow the pattern suggested in
+ // https://commons.apache.org/proper/commons-compress/examples.html
+ private static void unTarUsingJava(String inFilePath, String
targetDirPath, boolean gzipped)
+ throws IOException {
+ try (InputStream fi = Files.newInputStream(Paths.get(inFilePath));
+ InputStream bi = new BufferedInputStream(fi);
+ ArchiveInputStream ai =
+ new TarArchiveInputStream(
+ gzipped ? new GzipCompressorInputStream(bi) :
bi)) {
+ ArchiveEntry entry;
+ while ((entry = ai.getNextEntry()) != null) {
+ File f = new File(targetDirPath, entry.getName());
+ if (entry.isDirectory()) {
+ if (!f.isDirectory() && !f.mkdirs()) {
+ throw new IOException("failed to create directory " +
f);
+ }
+ } else {
+ File parent = f.getParentFile();
+ if (!parent.isDirectory() && !parent.mkdirs()) {
+ throw new IOException("failed to create directory " +
parent);
+ }
+ OutputStream o = Files.newOutputStream(f.toPath());
+ byte[] buf = new byte[(int) entry.getSize()];
+ IOUtils.readFully(ai, buf, 0, buf.length);
Review comment:
If the size of the entry is very large, allocating a buffer of that entire size at once may cause an OutOfMemoryError. Consider copying in fixed-size chunks instead.
##########
File path:
flink-python/src/main/java/org/apache/flink/python/util/DecompressUtils.java
##########
@@ -0,0 +1,12 @@
+package org.apache.flink.python.util;
Review comment:
This file lacks the Apache license header.
##########
File path: flink-python/src/main/java/org/apache/flink/python/util/TarUtils.java
##########
@@ -0,0 +1,98 @@
+package org.apache.flink.python.util;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.util.IOUtils;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/** Utils used to extract tar files. */
+@Internal
+public class TarUtils {
+ public static void unTar(String inFilePath, String targetDirPath)
+ throws IOException, InterruptedException {
+ File targetDir = new File(targetDirPath);
+ if (!targetDir.mkdirs()) {
+ if (!targetDir.isDirectory()) {
+ throw new IOException("Mkdirs failed to create " + targetDir);
+ }
+ }
+ boolean gzipped = inFilePath.endsWith("gz");
+ if (DecompressUtils.isUnix()) {
+ unTarUsingTar(inFilePath, targetDirPath, gzipped);
+ } else {
+ unTarUsingJava(inFilePath, targetDirPath, gzipped);
+ }
+ }
+
+ // Copy and simplify from hadoop-common package that is used in YARN
+ // See
https://github.com/apache/hadoop/blob/7f93349ee74da5f35276b7535781714501ab2457/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+ private static void unTarUsingTar(String inFilePath, String targetDirPath,
boolean gzipped)
+ throws IOException, InterruptedException {
+ inFilePath = makeSecureShellPath(inFilePath);
+ targetDirPath = makeSecureShellPath(targetDirPath);
+ String untarCommand =
+ gzipped
+ ? String.format(
+ "gzip -dc '%s' | (cd '%s' && tar -xf -)",
inFilePath, targetDirPath)
+ : String.format("cd '%s' && tar -xf '%s'",
targetDirPath, inFilePath);
+ Process process = new ProcessBuilder("bash", "-c",
untarCommand).start();
+ int exitCode = process.waitFor();
+ if (exitCode != 0) {
+ throw new IOException(
+ "Error untarring file "
+ + inFilePath
+ + ". Tar process exited with exit code "
+ + exitCode);
+ }
+ }
+
+ // Follow the pattern suggested in
+ // https://commons.apache.org/proper/commons-compress/examples.html
+ private static void unTarUsingJava(String inFilePath, String
targetDirPath, boolean gzipped)
+ throws IOException {
+ try (InputStream fi = Files.newInputStream(Paths.get(inFilePath));
+ InputStream bi = new BufferedInputStream(fi);
+ ArchiveInputStream ai =
+ new TarArchiveInputStream(
+ gzipped ? new GzipCompressorInputStream(bi) :
bi)) {
+ ArchiveEntry entry;
+ while ((entry = ai.getNextEntry()) != null) {
+ File f = new File(targetDirPath, entry.getName());
+ if (entry.isDirectory()) {
+ if (!f.isDirectory() && !f.mkdirs()) {
+ throw new IOException("failed to create directory " +
f);
Review comment:
```suggestion
throw new IOException("Failed to create directory "
+ f);
```
##########
File path: flink-python/src/main/java/org/apache/flink/python/util/TarUtils.java
##########
@@ -0,0 +1,98 @@
+package org.apache.flink.python.util;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.util.IOUtils;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/** Utils used to extract tar files. */
+@Internal
+public class TarUtils {
+ public static void unTar(String inFilePath, String targetDirPath)
+ throws IOException, InterruptedException {
+ File targetDir = new File(targetDirPath);
+ if (!targetDir.mkdirs()) {
+ if (!targetDir.isDirectory()) {
+ throw new IOException("Mkdirs failed to create " + targetDir);
+ }
+ }
+ boolean gzipped = inFilePath.endsWith("gz");
+ if (DecompressUtils.isUnix()) {
+ unTarUsingTar(inFilePath, targetDirPath, gzipped);
+ } else {
+ unTarUsingJava(inFilePath, targetDirPath, gzipped);
+ }
+ }
+
+ // Copy and simplify from hadoop-common package that is used in YARN
+ // See
https://github.com/apache/hadoop/blob/7f93349ee74da5f35276b7535781714501ab2457/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+ private static void unTarUsingTar(String inFilePath, String targetDirPath,
boolean gzipped)
+ throws IOException, InterruptedException {
+ inFilePath = makeSecureShellPath(inFilePath);
+ targetDirPath = makeSecureShellPath(targetDirPath);
+ String untarCommand =
+ gzipped
+ ? String.format(
+ "gzip -dc '%s' | (cd '%s' && tar -xf -)",
inFilePath, targetDirPath)
+ : String.format("cd '%s' && tar -xf '%s'",
targetDirPath, inFilePath);
+ Process process = new ProcessBuilder("bash", "-c",
untarCommand).start();
+ int exitCode = process.waitFor();
+ if (exitCode != 0) {
+ throw new IOException(
+ "Error untarring file "
+ + inFilePath
+ + ". Tar process exited with exit code "
+ + exitCode);
+ }
+ }
+
+ // Follow the pattern suggested in
+ // https://commons.apache.org/proper/commons-compress/examples.html
+ private static void unTarUsingJava(String inFilePath, String
targetDirPath, boolean gzipped)
+ throws IOException {
+ try (InputStream fi = Files.newInputStream(Paths.get(inFilePath));
+ InputStream bi = new BufferedInputStream(fi);
+ ArchiveInputStream ai =
+ new TarArchiveInputStream(
+ gzipped ? new GzipCompressorInputStream(bi) :
bi)) {
+ ArchiveEntry entry;
+ while ((entry = ai.getNextEntry()) != null) {
+ File f = new File(targetDirPath, entry.getName());
+ if (entry.isDirectory()) {
+ if (!f.isDirectory() && !f.mkdirs()) {
+ throw new IOException("failed to create directory " +
f);
+ }
+ } else {
+ File parent = f.getParentFile();
+ if (!parent.isDirectory() && !parent.mkdirs()) {
+ throw new IOException("failed to create directory " +
parent);
Review comment:
```suggestion
throw new IOException("Failed to create directory "
+ parent);
```
##########
File path: flink-python/src/main/java/org/apache/flink/python/util/TarUtils.java
##########
@@ -0,0 +1,98 @@
+package org.apache.flink.python.util;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.util.IOUtils;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+/** Utils used to extract tar files. */
+@Internal
+public class TarUtils {
+ public static void unTar(String inFilePath, String targetDirPath)
+ throws IOException, InterruptedException {
+ File targetDir = new File(targetDirPath);
+ if (!targetDir.mkdirs()) {
+ if (!targetDir.isDirectory()) {
+ throw new IOException("Mkdirs failed to create " + targetDir);
+ }
+ }
+ boolean gzipped = inFilePath.endsWith("gz");
+ if (DecompressUtils.isUnix()) {
+ unTarUsingTar(inFilePath, targetDirPath, gzipped);
+ } else {
+ unTarUsingJava(inFilePath, targetDirPath, gzipped);
+ }
+ }
+
+ // Copy and simplify from hadoop-common package that is used in YARN
+ // See
https://github.com/apache/hadoop/blob/7f93349ee74da5f35276b7535781714501ab2457/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+ private static void unTarUsingTar(String inFilePath, String targetDirPath,
boolean gzipped)
+ throws IOException, InterruptedException {
+ inFilePath = makeSecureShellPath(inFilePath);
+ targetDirPath = makeSecureShellPath(targetDirPath);
+ String untarCommand =
+ gzipped
+ ? String.format(
+ "gzip -dc '%s' | (cd '%s' && tar -xf -)",
inFilePath, targetDirPath)
+ : String.format("cd '%s' && tar -xf '%s'",
targetDirPath, inFilePath);
+ Process process = new ProcessBuilder("bash", "-c",
untarCommand).start();
+ int exitCode = process.waitFor();
+ if (exitCode != 0) {
+ throw new IOException(
+ "Error untarring file "
+ + inFilePath
+ + ". Tar process exited with exit code "
+ + exitCode);
+ }
+ }
+
+ // Follow the pattern suggested in
+ // https://commons.apache.org/proper/commons-compress/examples.html
+ private static void unTarUsingJava(String inFilePath, String
targetDirPath, boolean gzipped)
+ throws IOException {
+ try (InputStream fi = Files.newInputStream(Paths.get(inFilePath));
+ InputStream bi = new BufferedInputStream(fi);
+ ArchiveInputStream ai =
+ new TarArchiveInputStream(
+ gzipped ? new GzipCompressorInputStream(bi) :
bi)) {
+ ArchiveEntry entry;
+ while ((entry = ai.getNextEntry()) != null) {
+ File f = new File(targetDirPath, entry.getName());
+ if (entry.isDirectory()) {
+ if (!f.isDirectory() && !f.mkdirs()) {
+ throw new IOException("failed to create directory " +
f);
+ }
+ } else {
Review comment:
The Hadoop implementation also handles symbolic links and hard links in
https://github.com/apache/hadoop/blob/7f93349ee74da5f35276b7535781714501ab2457/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java#L991.
Should we handle those cases as well?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]