Github user pwendell commented on a diff in the pull request:

    https://github.com/apache/spark/pull/5732#discussion_r29538027
  
    --- Diff: 
streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala
 ---
    @@ -67,27 +67,26 @@ abstract class ReceiverInputDStream[T: 
ClassTag](@transient ssc_ : StreamingCont
           } else {
             // Otherwise, ask the tracker for all the blocks that have been 
allocated to this stream
             // for this batch
    -        val blockInfos =
    -          
ssc.scheduler.receiverTracker.getBlocksOfBatch(validTime).get(id).getOrElse(Seq.empty)
    -        val blockStoreResults = blockInfos.map { _.blockStoreResult }
    -        val blockIds = blockStoreResults.map { 
_.blockId.asInstanceOf[BlockId] }.toArray
    +        val receiverTracker = ssc.scheduler.receiverTracker
    +        val blockInfos = 
receiverTracker.getBlocksOfBatch(validTime).getOrElse(id, Seq.empty)
    +        val blockIds = blockInfos.map { _.blockId.asInstanceOf[BlockId] 
}.toArray
     
    -        // Check whether all the results are of the same type
    -        val resultTypes = blockStoreResults.map { _.getClass }.distinct
    -        if (resultTypes.size > 1) {
    -          logWarning("Multiple result types in block information, WAL 
information will be ignored.")
    -        }
    +        // Are WAL record handles present with all the blocks
    +        val areWALRecordHandlesPresent = blockInfos.forall { 
_.walRecordHandleOption.nonEmpty }
     
    -        // If all the results are of type WriteAheadLogBasedStoreResult, 
then create
    -        // WriteAheadLogBackedBlockRDD else create simple BlockRDD.
    -        if (resultTypes.size == 1 && resultTypes.head == 
classOf[WriteAheadLogBasedStoreResult]) {
    -          val logSegments = blockStoreResults.map {
    -            _.asInstanceOf[WriteAheadLogBasedStoreResult].walRecordHandle
    -          }.toArray
    -          // Since storeInBlockManager = false, the storage level does not 
matter.
    -          new WriteAheadLogBackedBlockRDD[T](ssc.sparkContext,
    -            blockIds, logSegments, storeInBlockManager = false, 
StorageLevel.MEMORY_ONLY_SER)
    +        if (areWALRecordHandlesPresent) {
    +          // If all the blocks have WAL record handle, then create a 
WALBackedBlockRDD
    +          val isBlockIdValid = blockInfos.map { _.isBlockIdValid() 
}.toArray
    +          val walRecordHandles = blockInfos.map { 
_.walRecordHandleOption.get }.toArray
    +          new WriteAheadLogBackedBlockRDD[T](
    +            ssc.sparkContext, blockIds, walRecordHandles, isBlockIdValid)
             } else {
    +          // Else, create a BlockRDD. However, if there are some blocks 
with WAL info but not others
    +          // then that is unexpected and log a warning accordingly.
    +          if (blockInfos.find(_.walRecordHandleOption.nonEmpty).nonEmpty) {
    +            logWarning("Could not find Write Ahead Log information on some 
of the blocks, " +
    --- End diff --
    
    This seems like it should be at least an `ERROR` level log, right? Is there 
an expected code path in which this branch is actually reached?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to