tion").setMaster("local[*]");
> JavaSparkContext sc = new JavaSparkContext(conf);
>
> SparkSession spark = SparkSession
> .builder()
> .appName("Word Count")
> .getOrCreate()
>
;>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>> public final class JavaWordCount {
>> private static
.newSession();
>
>
> JavaRDD lines = spark.read().textFile(args[0]).javaRDD();
>
>
> JavaRDD words = lines.flatMap(new FlatMapFunction<String,
> String>() {
> @Override
> public Iterator call(String s) {
> return A
ds = lines.flatMap(new FlatMapFunction<String,
> String>() {
> @Override
> public Iterator call(String s) {
> return Arrays.asList(SPACE.split(s)).iterator();
> }
> });
>
> JavaPairRDD<String, Integer> ones = words.mapToPair(
>
JavaPairRDD<String, Integer> ones = words.mapToPair(
> new PairFunction<String, String, Integer>() {
>@Override
>public Tuple2<String, Integer> call(String s) {
> return new Tuple2<>(s, 1);
>}
&g
nts.collect();
for (Tuple2 tuple : output) {
System.out.println(tuple._1() + ": " + tuple._2());
}
spark.stop();
}
}
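For reference, the same pipeline can be written more compactly with Java 8 lambdas, assuming the Spark 2.x Java API as used above. This is a minimal sketch that reuses the lines RDD, the SPACE pattern, and the output handling from the class above rather than a standalone program:

    // Same transformations as the anonymous inner classes above, as lambdas.
    JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator());
    JavaPairRDD<String, Integer> ones = words.mapToPair(s -> new Tuple2<>(s, 1));
    JavaPairRDD<String, Integer> counts = ones.reduceByKey((i1, i2) -> i1 + i2);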