Github user viirya commented on a diff in the pull request:
https://github.com/apache/spark/pull/15659#discussion_r87125921
--- Diff: python/setup.py ---
@@ -38,11 +38,22 @@
 # A temporary path so we can access above the Python project root and
 # fetch scripts and jars we need
 TEMP_PATH = "deps"
 SPARK_HOME = os.path.abspath("../")
-JARS_PATH = os.path.join(SPARK_HOME, "assembly/target/scala-2.11/jars/")
-# Use the release jars path if we are in release mode.
-if (os.path.isfile("../RELEASE") and
-        len(glob.glob("../jars/spark*core*.jar")) == 1):
+# Figure out where the jars are we need to package with PySpark.
+JARS_PATH = glob.glob(os.path.join(SPARK_HOME, "assembly/target/scala-*/jars/"))
+
+if len(JARS_PATH) == 1:
+    JARS_PATH = JARS_PATH[0]
+elif (os.path.isfile("../RELEASE") and
+      len(glob.glob("../jars/spark*core*.jar")) == 1):
+    # Release mode puts the jars in a jars directory
+    JARS_PATH = os.path.join(SPARK_HOME, "jars")
+elif len(JARS_PATH) > 1:
+    print("Assembly jars exist for multiple scalas, please cleanup assembly/target",
+          file=sys.stderr)
+    sys.exit(-1)
+elif len(JARS_PATH) == 0 and not os.path.exists("deps"):
--- End diff ---
nit: "deps" -> TEMP_PATH, I think?
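
A minimal sketch of what that suggestion would look like, assuming the TEMP_PATH constant defined at the top of this hunk is still in scope here:

    # Refer to the TEMP_PATH constant rather than repeating the "deps"
    # literal, so this check stays in sync if the temp dir is ever renamed.
    elif len(JARS_PATH) == 0 and not os.path.exists(TEMP_PATH):

Since TEMP_PATH is already set to "deps" a few lines up, this wouldn't change behavior; it just removes the duplicated string literal.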