java - AbstractMethodError in sample MLlib program -


I'm trying to build the sample recommender from the Apache Spark MLlib collaborative-filtering example http://spark.apache.org/docs/1.2.1/mllib-collaborative-filtering.html#examples in Java. When I build and run it (in IntelliJ IDEA), the output logs show:

Exception in thread "main" java.lang.AbstractMethodError

    at org.apache.spark.Logging$class.log(Logging.scala:52)
    at org.apache.spark.mllib.recommendation.ALS.log(ALS.scala:94)
    at org.apache.spark.Logging$class.logInfo(Logging.scala:59)
    at org.apache.spark.mllib.recommendation.ALS.logInfo(ALS.scala:94)
    at org.apache.spark.mllib.recommendation.ALS$$anonfun$run$1.apply$mcVI$sp(ALS.scala:232)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.mllib.recommendation.ALS.run(ALS.scala:230)
    at org.apache.spark.mllib.recommendation.ALS$.train(ALS.scala:599)
    at org.apache.spark.mllib.recommendation.ALS$.train(ALS.scala:616)
    at org.apache.spark.mllib.recommendation.ALS.train(ALS.scala)
    at sample.SimpleApp.main(SimpleApp.java:36)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)

I'm a beginner with Spark. Can anyone tell me what this error means?

Here is the source (exactly the same as the MLlib docs example, except for the name of the input file):

package sample;

import scala.Tuple2;

import org.apache.spark.api.java.*;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.mllib.recommendation.ALS;
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
import org.apache.spark.mllib.recommendation.Rating;
import org.apache.spark.SparkConf;

public class SimpleApp {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Collaborative Filtering Example").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Load and parse the data
        String path = "/home/deeepak/somefile.txt";
        JavaRDD<String> data = sc.textFile(path);
        JavaRDD<Rating> ratings = data.map(
                new Function<String, Rating>() {
                    public Rating call(String s) {
                        String[] sarray = s.split(",");
                        return new Rating(Integer.parseInt(sarray[0]), Integer.parseInt(sarray[1]),
                                Double.parseDouble(sarray[2]));
                    }
                }
        );

        // Build the recommendation model using ALS
        int rank = 10;
        int numIterations = 20;
        MatrixFactorizationModel model = ALS.train(JavaRDD.toRDD(ratings), 10, 20, 0.01);

        // Evaluate the model on rating data
        JavaRDD<Tuple2<Object, Object>> userProducts = ratings.map(
                new Function<Rating, Tuple2<Object, Object>>() {
                    public Tuple2<Object, Object> call(Rating r) {
                        return new Tuple2<Object, Object>(r.user(), r.product());
                    }
                }
        );
        JavaPairRDD<Tuple2<Integer, Integer>, Double> predictions = JavaPairRDD.fromJavaRDD(
                model.predict(JavaRDD.toRDD(userProducts)).toJavaRDD().map(
                        new Function<Rating, Tuple2<Tuple2<Integer, Integer>, Double>>() {
                            public Tuple2<Tuple2<Integer, Integer>, Double> call(Rating r) {
                                return new Tuple2<Tuple2<Integer, Integer>, Double>(
                                        new Tuple2<Integer, Integer>(r.user(), r.product()), r.rating());
                            }
                        }
                ));
        JavaRDD<Tuple2<Double, Double>> ratesAndPreds =
                JavaPairRDD.fromJavaRDD(ratings.map(
                        new Function<Rating, Tuple2<Tuple2<Integer, Integer>, Double>>() {
                            public Tuple2<Tuple2<Integer, Integer>, Double> call(Rating r) {
                                return new Tuple2<Tuple2<Integer, Integer>, Double>(
                                        new Tuple2<Integer, Integer>(r.user(), r.product()), r.rating());
                            }
                        }
                )).join(predictions).values();
        double MSE = JavaDoubleRDD.fromRDD(ratesAndPreds.map(
                new Function<Tuple2<Double, Double>, Object>() {
                    public Object call(Tuple2<Double, Double> pair) {
                        Double err = pair._1() - pair._2();
                        return err * err;
                    }
                }
        ).rdd()).mean();
        System.out.println("Mean Squared Error = " + MSE);
    }
}
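For reference, the map function above expects each input line to be comma-separated user,product,rating values. A hypothetical helper for generating such a file (the ratings below are made up for illustration; it writes to the same path the program reads):

package sample;

import java.io.PrintWriter;

// Hypothetical helper, not part of the original question: writes a few
// "user,product,rating" lines in the comma-separated format that the
// Function<String, Rating> above parses with s.split(",").
public class MakeSampleRatings {
    public static void main(String[] args) throws Exception {
        try (PrintWriter out = new PrintWriter("/home/deeepak/somefile.txt")) {
            out.println("1,101,5.0");
            out.println("1,102,3.0");
            out.println("2,101,4.0");
            out.println("2,102,1.0");
            out.println("3,101,2.0");
        }
    }
}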

The error seems to be on line 36 of SimpleApp.java (the ALS.train call). The Java version used is 1.8.40, and I'm getting the Spark dependencies using Maven.
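Since AbstractMethodError usually points to a binary mismatch between the classpath the code was compiled against and the one it runs on, one thing worth checking (my suggestion, not from the original post) is which Spark and Scala versions are actually loaded at runtime. A minimal sketch, assuming the same local setup as above:

package sample;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

// Hypothetical diagnostic, not part of the original question: prints the
// Spark and Scala versions found on the runtime classpath so they can be
// compared against the versions declared in pom.xml.
public class VersionCheck {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Version Check").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        System.out.println("Spark on the classpath: " + sc.version());
        System.out.println("Scala on the classpath: " + scala.util.Properties.versionString());
        sc.stop();
    }
}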

I made sure to have the latest versions of Spark and MLlib.

pom.xml:


<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>1.3.1</version>
</dependency>

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-mllib_2.10</artifactId>
    <version>1.3.1</version>
</dependency>
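A common trigger for this particular error is mixing Spark module versions (or Scala suffixes) on the classpath, e.g. an older spark-core left over in the local repository. A sketch of keeping both artifacts in lockstep via a shared Maven property (the property name spark.version is my choice, not from the original pom):

<properties>
    <spark.version>1.3.1</spark.version>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-mllib_2.10</artifactId>
        <version>${spark.version}</version>
    </dependency>
</dependencies>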
