Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19788#discussion_r169227734
  
    --- Diff: common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandler.java ---
    @@ -196,42 +196,51 @@ private ShuffleMetrics() {
         private final String appId;
         private final String execId;
         private final int shuffleId;
    -    // An array containing mapId and reduceId pairs.
    -    private final int[] mapIdAndReduceIds;
    +    // An array containing mapId, reduceId and numBlocks tuple
    +    private final int[] shuffleBlockIds;
     
         ManagedBufferIterator(String appId, String execId, String[] blockIds) {
           this.appId = appId;
           this.execId = execId;
           String[] blockId0Parts = blockIds[0].split("_");
    -      if (blockId0Parts.length != 4 || !blockId0Parts[0].equals("shuffle")) {
    +      // length == 4: ShuffleBlockId
    +      // length == 5: ContinuousShuffleBlockId
    +      if (!(blockId0Parts.length == 4 || blockId0Parts.length == 5) ||
    +        !blockId0Parts[0].equals("shuffle")) {
             throw new IllegalArgumentException("Unexpected shuffle block id format: " + blockIds[0]);
           }
           this.shuffleId = Integer.parseInt(blockId0Parts[1]);
    -      mapIdAndReduceIds = new int[2 * blockIds.length];
    +      shuffleBlockIds = new int[3 * blockIds.length];
           for (int i = 0; i < blockIds.length; i++) {
             String[] blockIdParts = blockIds[i].split("_");
    -        if (blockIdParts.length != 4 || !blockIdParts[0].equals("shuffle")) {
    +        if (!(blockIdParts.length == 4 || blockIdParts.length == 5) ||
    +          !blockIdParts[0].equals("shuffle")) {
    --- End diff --
    
    shall we create a `boolean isShuffleBlock(String[] blockIdParts)`?
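
    A minimal sketch of what such a helper could look like (illustrative only; the method name follows the suggestion above and the checks are copied from the diff):

    ```java
    // Hypothetical helper: a valid shuffle block id has either 4 parts
    // (ShuffleBlockId) or 5 parts (ContinuousShuffleBlockId) and starts
    // with the "shuffle" prefix.
    private boolean isShuffleBlock(String[] blockIdParts) {
      return (blockIdParts.length == 4 || blockIdParts.length == 5)
        && blockIdParts[0].equals("shuffle");
    }
    ```

    Both call sites would then reduce to `if (!isShuffleBlock(blockIdParts)) { ... }`.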

