git commit: SPARK-1203 fix saving to hdfs from yarn
Repository: spark Updated Branches: refs/heads/branch-0.9 d385b5a19 -> 250ec271c SPARK-1203 fix saving to hdfs from yarn Author: Thomas Graves Closes #173 from tgravescs/SPARK-1203 and squashes the following commits: 4fd5ded [Thomas Graves] adding import 964e3f7 [Thomas Graves] SPARK-1203 fix saving to hdfs from yarn Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/250ec271 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/250ec271 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/250ec271 Branch: refs/heads/branch-0.9 Commit: 250ec271c6136b881f7ffcb9ce746c30d4abbf3c Parents: d385b5a Author: Thomas Graves Authored: Wed Mar 19 08:09:20 2014 -0500 Committer: Thomas Graves Committed: Wed Mar 19 08:19:47 2014 -0500 -- core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala | 2 ++ 1 file changed, 2 insertions(+) -- http://git-wip-us.apache.org/repos/asf/spark/blob/250ec271/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala -- diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala index 0b2917b..0bc09b3 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala @@ -44,6 +44,7 @@ import com.clearspring.analytics.stream.cardinality.HyperLogLog import org.apache.hadoop.mapred.SparkHadoopWriter import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil import org.apache.spark._ +import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.SparkContext._ import org.apache.spark.partial.{BoundedDouble, PartialResult} import org.apache.spark.Partitioner.defaultPartitioner @@ -710,6 +711,7 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)]) if (valueClass == null) { throw new SparkException("Output value class not set") } +SparkHadoopUtil.get.addCredentials(conf) 
logDebug("Saving as hadoop file of type (" + keyClass.getSimpleName + ", " + valueClass.getSimpleName + ")")
git commit: SPARK-1203 fix saving to hdfs from yarn
Repository: spark Updated Branches: refs/heads/master d55ec86de -> 6112270c9 SPARK-1203 fix saving to hdfs from yarn Author: Thomas Graves Closes #173 from tgravescs/SPARK-1203 and squashes the following commits: 4fd5ded [Thomas Graves] adding import 964e3f7 [Thomas Graves] SPARK-1203 fix saving to hdfs from yarn Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6112270c Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6112270c Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6112270c Branch: refs/heads/master Commit: 6112270c94f1a30a461a91f6e56485a5eaec2606 Parents: d55ec86 Author: Thomas Graves Authored: Wed Mar 19 08:09:20 2014 -0500 Committer: Thomas Graves Committed: Wed Mar 19 08:09:20 2014 -0500 -- core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala | 2 ++ 1 file changed, 2 insertions(+) -- http://git-wip-us.apache.org/repos/asf/spark/blob/6112270c/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala -- diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala index 447deaf..75fc02a 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala @@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutput import org.apache.hadoop.mapred.SparkHadoopWriter import org.apache.spark._ +import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.Partitioner.defaultPartitioner import org.apache.spark.SparkContext._ import org.apache.spark.partial.{BoundedDouble, PartialResult} @@ -723,6 +724,7 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)]) if (valueClass == null) { throw new SparkException("Output value class not set") } +SparkHadoopUtil.get.addCredentials(conf) logDebug("Saving as hadoop file of type (" + 
keyClass.getSimpleName + ", " + valueClass.getSimpleName + ")")