val data = sc.textFile("/home/amit/testData.csv").cache()
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey is not working. But the same thing is working from
spark-shell.
Please help me
val sc = new SparkContext(conf)
val data = sc.textFile("/home/amit/testData.csv").cache()
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey is not working. But the same thing is working from
spark-shell.
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey is not working. But the same thing is working from
spark-shell.
Please help me
Thanks
Amit
val conf = new SparkConf().setAppName("Spark Job").setMaster("local")
val sc = new SparkContext(conf)
val data = sc.textFile("/home/amit/testData.csv").cache()
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey
val conf = new SparkConf().setAppName("Spark Job").setMaster("local")
val sc = new SparkContext(conf)
val data = sc.textFile("/home/amit/testData.csv").cache()
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey
)})
}
}
Here groupByKey is not working. But the same thing is working from spark-shell.
Please help me
Thanks
Amit
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey is not working. But the same thing is working from spark-shell.
Please help me
Thanks
Amit
val conf = new SparkConf().setAppName("Spark Job").setMaster("local")
val sc = new SparkContext(conf)
val data = sc.textFile("/home/amit/testData.csv").cache()
val result = data.mapPartitions(pLines).groupByKey
//val list = result.filter(x => {(x._1).contains(24050881)})
}
}
Here groupByKey is not working