[ https://issues.apache.org/jira/browse/SPARK-8627?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14601019#comment-14601019 ]

Subhod Lagade commented on SPARK-8627:
--------------------------------------

Can you help me resolve this? usersProducts is an RDD[(Int, Int)], but model.predict(usersProducts) still gives me the error below.
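For reference, this is the kind of standalone check I would expect to compile, assuming the Spark 1.4.0 MLlib API where MatrixFactorizationModel.predict takes an RDD[(Int, Int)] and returns an RDD[Rating]. The object name PredictCheck and the in-memory ratings are made up just to isolate the predict call, and the explicit RDD[(Int, Int)] annotation is only there to rule out a type-inference problem:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}

object PredictCheck {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("PredictCheck"))

    // A few in-memory ratings instead of the CSV, just to isolate the predict call.
    val ratings = sc.parallelize(Seq(
      Rating(1, 1, 5.0), Rating(1, 2, 1.0), Rating(2, 1, 4.0), Rating(2, 2, 2.0)))

    val model: MatrixFactorizationModel = ALS.train(ratings, 3, 2, 0.01)

    // Explicit type annotation so the RDD overload of predict is unambiguous.
    val usersProducts: RDD[(Int, Int)] = ratings.map { case Rating(u, p, _) => (u, p) }
    val predictions: RDD[Rating] = model.predict(usersProducts)
    predictions.collect().foreach(println)

    sc.stop()
  }
}

If something like this compiles but my posted code still resolves to predict(user: Int, product: Int), I wonder whether the build is actually compiling against the 1.4.0 spark-mllib artifact; any pointers appreciated.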


> ALS model predict error
> -----------------------
>
>                 Key: SPARK-8627
>                 URL: https://issues.apache.org/jira/browse/SPARK-8627
>             Project: Spark
>          Issue Type: Bug
>          Components: MLlib
>    Affects Versions: 1.4.0
>            Reporter: Subhod Lagade
>
> /**
>  * Created by subhod lagade on 25/06/15.
>  */
> import org.apache.spark.SparkConf
> import org.apache.spark.streaming.StreamingContext._
> import org.apache.spark.streaming.{Seconds, StreamingContext}
> import org.apache.spark.streaming._
> import org.apache.spark.SparkContext
> import org.apache.spark.SparkContext._
> import java.io.BufferedReader
> import java.io.FileInputStream
> import java.io.IOException
> import java.io.InputStreamReader
> import java.io.PrintStream
> import java.net.ServerSocket
> import java.net.Socket
> import java.util.Properties
> import org.apache.spark.mllib.recommendation.ALS
> import org.apache.spark.mllib.recommendation.MatrixFactorizationModel
> import org.apache.spark.mllib.recommendation.Rating
> object SparkStreamKafka {
>   def main(args: Array[String]) {
>     val conf = new SparkConf().setAppName("Simple Application")
>     val sc = new SparkContext(conf)
>
>     // Load ratings as "user,product,rate" lines from CSV.
>     val data = sc.textFile("/home/appadmin/Disney/data.csv")
>     val ratings = data.map(_.split(',') match {
>       case Array(user, product, rate) => Rating(user.toInt, product.toInt, rate.toDouble)
>     })
>
>     // Build the recommendation model using ALS.
>     val rank = 3
>     val numIterations = 2
>     val model = ALS.train(ratings, rank, numIterations, 0.01)
>
>     // Predict ratings for the observed (user, product) pairs.
>     val usersProducts = ratings.map { case Rating(user, product, rate) => (user, product) }
>     usersProducts.foreach(println)
>     val predictions = model.predict(usersProducts)
>   }
> }
> /*
> ERROR Message
> [ERROR] /home/appadmin/disneypoc/src/main/scala/org/capgemini/SparkKafka.scala:53: error: not enough arguments for method predict: (user: Int, product: Int)Double.
> [INFO] Unspecified value parameter product.
> [INFO]  val predictions = model.predict(usersProducts)
> */



