See <https://builds.apache.org/job/Mahout-Quality/2875/>
------------------------------------------
[...truncated 6231 lines...]
- C = A %*% B incompatible B keys
- Spark-specific C = At %*% B , join
- C = At %*% B , join, String-keyed
- C = At %*% B , zippable, String-keyed
{
0 => {0:26.0,1:35.0,2:46.0,3:51.0}
1 => {0:50.0,1:69.0,2:92.0,3:105.0}
2 => {0:62.0,1:86.0,2:115.0,3:132.0}
3 => {0:74.0,1:103.0,2:138.0,3:159.0}
}
- C = A %*% inCoreB
{
0 => {0:26.0,1:35.0,2:46.0,3:51.0}
1 => {0:50.0,1:69.0,2:92.0,3:105.0}
2 => {0:62.0,1:86.0,2:115.0,3:132.0}
3 => {0:74.0,1:103.0,2:138.0,3:159.0}
}
- C = inCoreA %*%: B
- C = A.t %*% A
- C = A.t %*% A fat non-graph
- C = A.t %*% A non-int key
- C = A + B
A=
{
0 => {0:1.0,1:2.0,2:3.0}
1 => {0:3.0,1:4.0,2:5.0}
2 => {0:5.0,1:6.0,2:7.0}
}
B=
{
0 => {0:0.3016352775165334,1:0.4033616890500241,2:0.24617539094020902}
1 => {0:0.007969135790208748,1:0.7844429658867654,2:0.6879141705693822}
2 => {0:0.2857409160055201,1:0.3340123257915044,2:0.6992849710796821}
}
C=
{
0 => {0:1.3016352775165334,1:2.403361689050024,2:3.246175390940209}
1 => {0:3.0079691357902085,1:4.7844429658867655,2:5.687914170569382}
2 => {0:5.28574091600552,1:6.334012325791504,2:7.699284971079682}
}
- C = A + B, identically partitioned
- C = A + B side test 1
- C = A + B side test 2
- C = A + B side test 3
- Ax
- A'x
- colSums, colMeans
- rowSums, rowMeans
- A.diagv
- numNonZeroElementsPerColumn
- C = A cbind B, cogroup
- C = A cbind B, zip
- B = A + 1.0
- C = A rbind B
- C = A rbind B, with empty
- scalarOps
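
For context, the green results above exercise Mahout's R-like distributed algebra DSL from the math-scala/Spark bindings. A minimal sketch of the kinds of expressions under test, assuming the math-scala imports below and an implicit Spark DistributedContext in scope (the matrices are illustrative, not the actual test fixtures):

    import org.apache.mahout.math.scalabindings._
    import org.apache.mahout.math.scalabindings.RLikeOps._
    import org.apache.mahout.math.drm._
    import org.apache.mahout.math.drm.RLikeDrmOps._

    // Illustrative in-core matrices (not the test fixtures).
    val inCoreA = dense((1, 2, 3), (3, 4, 5), (5, 6, 7))
    val inCoreB = dense((2, 3, 4), (3, 4, 5), (4, 5, 6))

    // Distribute the operands as DRMs, then evaluate the tested expressions.
    val A = drmParallelize(inCoreA, numPartitions = 2)
    val B = drmParallelize(inCoreB, numPartitions = 2)

    val prod = (A %*% inCoreB).collect   // "C = A %*% inCoreB"
    val sum  = (A + B).collect           // "C = A + B"
    val tAA  = (A.t %*% A).collect       // "C = A.t %*% A"
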
0 [Executor task launch worker-1] ERROR org.apache.spark.executor.Executor - Exception in task 9.0 in stage 245.0 (TID 543)
java.io.IOException: PARSING_ERROR(2)
    at org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
    at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
    at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
    at org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
    at org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
    at org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
    at org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
    at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
    at org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
    at sun.reflect.GeneratedMethodAccessor8.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
    at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
    at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
    at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
    at java.lang.Thread.run(Thread.java:662)
0 [Executor task launch worker-0] ERROR org.apache.spark.executor.Executor - Exception in task 8.0 in stage 245.0 (TID 542)
java.io.IOException: PARSING_ERROR(2)
    [stack trace identical to TID 543 above]
0 [Executor task launch worker-2] ERROR org.apache.spark.executor.Executor - Exception in task 7.0 in stage 245.0 (TID 541)
java.io.IOException: PARSING_ERROR(2)
    [stack trace identical to TID 543 above]
17 [Result resolver thread-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 9 in stage 245.0 failed 1 times; aborting job
- C = A + B missing rows *** FAILED ***
  org.apache.spark.SparkException: Job aborted due to stage failure: Task 9 in stage 245.0 failed 1 times, most recent failure: Lost task 9.0 in stage 245.0 (TID 543, localhost): java.io.IOException: PARSING_ERROR(2)
        org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
        org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
        org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
        org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
        org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
        org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
        org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
        org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
        org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
        sun.reflect.GeneratedMethodAccessor8.invoke(Unknown Source)
        sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        java.lang.reflect.Method.invoke(Method.java:597)
        java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
        java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
        java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
        java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
        java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
        java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
        org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
        java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
        java.lang.Thread.run(Thread.java:662)
Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
  at scala.Option.foreach(Option.scala:236)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
  ...
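
The failure is not in the test's algebra: snappy-java throws PARSING_ERROR(2) while decompressing a TorrentBroadcast block during task deserialization, a known intermittent failure mode with Spark 1.x and snappy-java of this vintage. One possible mitigation, sketched below under the assumption that the harness builds its own SparkConf, is to take snappy out of the IO path entirely; spark.io.compression.codec is a standard Spark setting, and "lzf" is a valid Spark 1.x value:

    import org.apache.spark.{SparkConf, SparkContext}

    // Workaround sketch: run the suites with a non-snappy IO codec so
    // broadcast blocks never pass through snappy-java. The trace above
    // shows SnappyCompressionCodec is the codec currently in use.
    val conf = new SparkConf()
      .setMaster("local[4]")
      .setAppName("mahout-spark-bindings-tests")
      .set("spark.io.compression.codec", "lzf")

    val sc = new SparkContext(conf)
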
- C = cbind(A, B) with missing rows
collected A =
{
0 => {0:1.0,1:2.0,2:3.0}
1 => {}
2 => {}
3 => {0:3.0,1:4.0,2:5.0}
}
collected B =
{
2 => {0:1.0,1:1.0,2:1.0}
1 => {0:1.0,1:1.0,2:1.0}
3 => {0:4.0,1:5.0,2:6.0}
0 => {0:2.0,1:3.0,2:4.0}
}
- B = A + 1.0 missing rows
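
The "missing rows" tests above check that elementwise operations still emit correctly keyed rows when some int keys have no backing vector in the DRM. A rough sketch of the intended semantics, under the same illustrative DSL assumptions as earlier (the real fixture constructs genuinely absent rows through Spark-specific helpers, not a dense matrix):

    // Rows 1 and 2 stand in for "missing" rows; A + 1.0 must still emit
    // them, treating each absent row as an all-zero vector.
    val inCoreA2 = dense((1, 2, 3), (0, 0, 0), (0, 0, 0), (3, 4, 5))
    val A2 = drmParallelize(inCoreA2, numPartitions = 2)
    val B2 = (A2 + 1.0).collect  // every cell, including zero rows, gains 1.0
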
Run completed in 1 minute, 35 seconds.
Total number of tests run: 75
Suites: completed 10, aborted 0
Tests: succeeded 74, failed 1, canceled 0, ignored 1, pending 0
*** 1 TEST FAILED ***
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Mahout Build Tools ................................ SUCCESS [5.412s]
[INFO] Apache Mahout ..................................... SUCCESS [2.142s]
[INFO] Mahout Math ....................................... SUCCESS [2:17.190s]
[INFO] Mahout MapReduce Legacy ........................... SUCCESS [11:08.955s]
[INFO] Mahout Integration ................................ SUCCESS [1:36.112s]
[INFO] Mahout Examples ................................... SUCCESS [51.407s]
[INFO] Mahout Release Package ............................ SUCCESS [0.114s]
[INFO] Mahout Math Scala bindings ........................ SUCCESS [2:02.916s]
[INFO] Mahout Spark bindings ............................. FAILURE [2:18.218s]
[INFO] Mahout Spark bindings shell ....................... SKIPPED
[INFO] Mahout H2O backend ................................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 20:24.635s
[INFO] Finished at: Wed Nov 26 17:32:58 UTC 2014
[INFO] Final Memory: 88M/445M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0-M2:test (test) on project mahout-spark_2.10: There are test failures -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :mahout-spark_2.10
Build step 'Invoke top-level Maven targets' marked build as failure
[PMD] Skipping publisher since build result is FAILURE
[TASKS] Skipping publisher since build result is FAILURE
Archiving artifacts
Sending artifact delta relative to Mahout-Quality #2874
Archived 72 artifacts
Archive block size is 32768
Received 3048 blocks and 36816273 bytes
Compression is 73.1%
Took 35 sec
Recording test results
Publishing Javadoc