Sure, here it is.
{
  "comment": "This config file uses default settings for all but the required values; see README.md for docs",
  "id": "default",
  "description": "Default settings",
  "engineFactory": "com.actionml.RecommendationEngine",
  "datasource": {
    "params": {
      "name": "sample-handmad",
      "appName": "np",
      "eventNames": ["read", "search", "view", "category-pref"],
      "minEventsPerUser": 1,
      "eventWindow": {
        "duration": "300 days",
        "removeDuplicates": true,
        "compressProperties": true
      }
    }
  },
  "sparkConf": {
    "spark.serializer": "org.apache.spark.serializer.KryoSerializer",
    "spark.kryo.registrator": "org.apache.mahout.sparkbindings.io.MahoutKryoRegistrator",
    "spark.kryo.referenceTracking": "false",
    "spark.kryoserializer.buffer": "300m",
    "spark.executor.memory": "4g",
    "spark.executor.cores": "2",
    "spark.task.cpus": "2",
    "spark.default.parallelism": "16",
    "es.index.auto.create": "true"
  },
  "algorithms": [
    {
      "comment": "simplest setup where all values are default, popularity-based backfill; must add eventNames",
      "name": "ur",
      "params": {
        "appName": "np",
        "indexName": "np",
        "typeName": "items",
        "blacklistEvents": [],
        "comment": "must have data for the first event or the model will not build; other events are optional",
        "indicators": [
          { "name": "read" },
          { "name": "search", "maxCorrelatorsPerItem": 5 },
          { "name": "category-pref", "maxCorrelatorsPerItem": 50 },
          { "name": "view", "maxCorrelatorsPerItem": 50 }
        ],
        "expireDateName": "itemExpiry",
        "dateName": "date",
        "num": 5
      }
    }
  ]
}
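
One note on the "must have data for the first event" comment above: "read" is the first name in eventNames, so training needs at least some "read" events stored for appName "np". A rough first-page spot check against PredictionIO's Event Server debug endpoint (GET /events.json), sketched in Scala; the localhost URL, default port 7070, and access key are placeholders or defaults, not values from this thread:

import scala.io.Source

object SpotCheckReadEvents {
  def main(args: Array[String]): Unit = {
    // Placeholder access key (assumption): find yours with `pio app list`.
    val accessKey = "YOUR_ACCESS_KEY"
    // Event Server debug endpoint on PredictionIO's default port.
    val url = s"http://localhost:7070/events.json?accessKey=$accessKey&limit=100"
    val body = Source.fromURL(url).mkString
    // First-page spot check only, not a full count of "read" events.
    if (body.contains("\"event\":\"read\"")) println("found \"read\" events")
    else println("no \"read\" events in the first page; re-check the import")
  }
}

If no "read" events show up, that would line up with the empty list in the trace below.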
On Mon, Jun 18, 2018 at 8:55 PM Pat Ferrel <[email protected]> wrote:
> This sounds like some missing required config in engine.json. Can you
> share the file?
>
>
> From: Anuj Kumar <[email protected]>
> Reply: [email protected]
> Date: June 18, 2018 at 5:05:22 AM
> To: [email protected]
> Subject: java.util.NoSuchElementException: head of empty list when running train
>
> Getting this while running "pio train". Please help.
>
> Exception in thread "main" java.util.NoSuchElementException: head of empty list
>   at scala.collection.immutable.Nil$.head(List.scala:420)
>   at scala.collection.immutable.Nil$.head(List.scala:417)
>   at org.apache.mahout.math.cf.SimilarityAnalysis$.crossOccurrenceDownsampled(SimilarityAnalysis.scala:177)
>   at com.actionml.URAlgorithm.calcAll(URAlgorithm.scala:343)
>   at com.actionml.URAlgorithm.train(URAlgorithm.scala:295)
>   at com.actionml.URAlgorithm.train(URAlgorithm.scala:180)
>   at org.apache.predictionio.controller.P2LAlgorithm.trainBase(P2LAlgorithm.scala:49)
>   at org.apache.predictionio.controller.Engine$$anonfun$18.apply(Engine.scala:690)
>   at org.apache.predictionio.controller.Engine$$anonfun$18.apply(Engine.scala:690)
>   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>   at scala.collection.immutable.List.foreach(List.scala:381)
>   at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>   at scala.collection.immutable.List.map(List.scala:285)
>   at org.apache.predictionio.controller.Engine$.train(Engine.scala:690)
>   at org.apache.predictionio.controller.Engine.train(Engine.scala:176)
>   at org.apache.predictionio.workflow.CoreWorkflow$.runTrain(CoreWorkflow.scala:67)
>   at org.apache.predictionio.workflow.CreateWorkflow$.main(CreateWorkflow.scala:251)
>   at org.apache.predictionio.workflow.CreateWorkflow.main(CreateWorkflow.scala)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:498)
>   at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
>   at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
>   at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
>   at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
>   at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>
>
> --
> -
> Best,
> Anuj Kumar
>
>
--
-
Best,
Anuj Kumar
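
For reference, the top frames of the quoted trace (scala.collection.immutable.Nil$.head) are just Scala's List.head throwing on an empty list inside Mahout's crossOccurrenceDownsampled, i.e. an indicator that produced no data. A minimal standalone repro of the same exception (the object name is illustrative only):

object EmptyHeadRepro {
  def main(args: Array[String]): Unit = {
    // Stand-in for the per-indicator event data collected at train time;
    // an indicator with no events yields an empty list.
    val indicatorData: List[String] = Nil
    // List.head on Nil throws the same exception as the trace:
    // java.util.NoSuchElementException: head of empty list
    println(indicatorData.head)
  }
}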