zentol commented on a change in pull request #7451:  [FLINK-11270][build] Do 
not include hadoop in flink-dist by default
URL: https://github.com/apache/flink/pull/7451#discussion_r248222744
 
 

 ##########
 File path: 
flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
 ##########
 @@ -508,18 +517,40 @@ private static void start(YarnConfiguration conf, String 
principal, String keyta
                }
                System.setProperty("user.home", homeDir.getAbsolutePath());
                String uberjarStartLoc = "..";
+
+               // find flink-dist jar in flink-dist module
                LOG.info("Trying to locate uberjar in {}", new 
File(uberjarStartLoc));
-               flinkUberjar = findFile(uberjarStartLoc, new 
RootDirFilenameFilter());
-               Assert.assertNotNull("Flink uberjar not found", flinkUberjar);
-               String flinkDistRootDir = 
flinkUberjar.getParentFile().getParent();
-               flinkLibFolder = flinkUberjar.getParentFile(); // the uberjar 
is located in lib/
-               Assert.assertNotNull("Flink flinkLibFolder not found", 
flinkLibFolder);
+               final File originalFlinkDistJar = findFile(uberjarStartLoc, new 
RootDirFilenameFilter());
+
+               // copy entirety of distribution into a temporary location
+               final Path originalFlinkDistRootDir = 
originalFlinkDistJar.getParentFile().getParentFile().toPath();
+               System.out.println("dist=" + originalFlinkDistRootDir);
+               final Path flinkDistRootDir = 
tmp.newFolder("tmp_dist_directory").toPath();
+
+               FileUtils.copyDirectory(originalFlinkDistRootDir.toFile(), 
flinkDistRootDir.toFile());
+
+               flinkLibFolder = flinkDistRootDir.resolve("lib").toFile();
                Assert.assertTrue("lib folder not found", 
flinkLibFolder.exists());
                Assert.assertTrue("lib folder not found", 
flinkLibFolder.isDirectory());
+               try (Stream<Path> libJars = 
Files.list(flinkLibFolder.toPath())) {
+                       final Optional<File> flinkDistJarOptional =
+                               libJars.map(Path::getFileName)
+                                       .map(Path::toString)
+                                       
.filter(RootDirFilenameFilter::isFlinkDistJar)
+                                       .map(fileName -> 
flinkLibFolder.toPath().resolve(Paths.get(fileName)))
+                                       .map(Path::toFile)
+                                       .findAny();
+                       flinkUberjar = flinkDistJarOptional.orElseThrow(() -> 
new AssertionError("Unable to locate flink-dist jar."));
+               }
 
-               if (!flinkUberjar.exists()) {
-                       Assert.fail("Unable to locate yarn-uberjar.jar");
+               // copy flink-shaded-hadoop2 into dist, since it is not 
included by default
+               // the hadoop jar was copied into the dependencies directory 
during the build using the maven-dependency-plugin
+               final Path relHadoopPath;
+               try (Stream<Path> dependencyJars = 
Files.list(Paths.get("target/dependencies"))) {
+                       relHadoopPath = dependencyJars.filter(jar -> 
jar.getFileName().toString().startsWith("flink-shaded-hadoop2"))
+                               .findAny().orElseThrow(() -> new 
AssertionError("Unable to locate flink-shaded-hadoop2 jar."));
                }
+               Files.copy(relHadoopPath, 
flinkLibFolder.toPath().resolve("flink-shaded-hadoop2.jar"));
 
 Review comment:
   That could probably work, but you'd need to give me more hints on how this 
could be implemented.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to