[hadoop] branch trunk updated: HADOOP-17027. Add tests for reading fair call queue capacity weight configs. Contributed by Fengnan Li

2020-05-07 Thread liuml07
This is an automated email from the ASF dual-hosted git repository.

liuml07 pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new e9e1ead  HADOOP-17027. Add tests for reading fair call queue capacity weight configs. Contributed by Fengnan Li
e9e1ead is described below

commit e9e1ead089c0b9f5f1788361329a64fec6561352
Author: Mingliang Liu 
AuthorDate: Thu May 7 16:50:23 2020 -0700

HADOOP-17027. Add tests for reading fair call queue capacity weight configs. Contributed by Fengnan Li
---
 .../apache/hadoop/ipc/TestCallQueueManager.java| 55 ++
 .../org/apache/hadoop/ipc/TestFairCallQueue.java   | 25 --
 2 files changed, 65 insertions(+), 15 deletions(-)
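
A note on the semantics under test: "ipc.8020.callqueue.capacity.weights" = "1,3" asks that the total queue capacity of 4 be split 1:3 across the two priority levels, giving sub-queues of size 1 and 3. The sketch below illustrates that proportional split only; it is not the FairCallQueue implementation, and the class and method names are hypothetical.

// Hypothetical sketch of how a total capacity and per-level weights
// combine into per-priority sub-queue capacities (not Hadoop code).
public final class CapacityWeightsSketch {
  static int[] subQueueCapacities(int totalCapacity, int[] weights) {
    int weightSum = 0;
    for (int w : weights) {
      weightSum += w;
    }
    int[] capacities = new int[weights.length];
    for (int i = 0; i < weights.length; i++) {
      // each priority level gets a share proportional to its weight
      capacities[i] = totalCapacity * weights[i] / weightSum;
    }
    return capacities;
  }

  public static void main(String[] args) {
    int[] caps = subQueueCapacities(4, new int[]{1, 3});
    System.out.println(caps[0] + "," + caps[1]);  // prints: 1,3
  }
}

With weights "1,1" the same capacity splits 2 and 2, which is why the second half of the new test drains strictly per priority with no overflow.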

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java
index bb4717e..38b3fe5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java
@@ -176,6 +176,12 @@ public class TestCallQueueManager {
   private static final Class<? extends RpcScheduler> schedulerClass
       = CallQueueManager.convertSchedulerClass(DefaultRpcScheduler.class);
 
+  private static final Class<? extends BlockingQueue<FakeCall>> fcqueueClass
+      = CallQueueManager.convertQueueClass(FairCallQueue.class, FakeCall.class);
+
+  private static final Class<? extends RpcScheduler> rpcSchedulerClass
+      = CallQueueManager.convertSchedulerClass(DecayRpcScheduler.class);
+
   @Test
   public void testCallQueueCapacity() throws InterruptedException {
     manager = new CallQueueManager<FakeCall>(queueClass, schedulerClass, false,
@@ -319,6 +325,55 @@ public class TestCallQueueManager {
 assertEquals(totalCallsConsumed, totalCallsCreated);
   }
 
+  @Test
+  public void testQueueCapacity() throws InterruptedException {
+    int capacity = 4;
+    String ns = "ipc.8020";
+    conf.setInt("ipc.8020.scheduler.priority.levels", 2);
+    conf.set("ipc.8020.callqueue.capacity.weights", "1,3");
+    manager = new CallQueueManager<>(fcqueueClass, rpcSchedulerClass, false,
+        capacity, ns, conf);
+
+    // insert 4 calls with 2 at each priority
+    // since the queue with priority 0 has only 1 capacity, the second call
+    // with p0 will be overflowed to queue with priority 1
+    for (int i = 0; i < capacity; i++) {
+      FakeCall fc = new FakeCall(i);
+      fc.setPriorityLevel(i%2);
+      manager.put(fc);
+    }
+
+    // get calls, the order should be
+    // call 0 with p0
+    // call 1 with p1
+    // call 2 with p0 since overflow
+    // call 3 with p1
+    assertEquals(manager.take().priorityLevel, 0);
+    assertEquals(manager.take().priorityLevel, 1);
+    assertEquals(manager.take().priorityLevel, 0);
+    assertEquals(manager.take().priorityLevel, 1);
+
+    conf.set("ipc.8020.callqueue.capacity.weights", "1,1");
+    manager = new CallQueueManager<>(fcqueueClass, rpcSchedulerClass, false,
+        capacity, ns, conf);
+
+    for (int i = 0; i < capacity; i++) {
+      FakeCall fc = new FakeCall(i);
+      fc.setPriorityLevel(i%2);
+      manager.put(fc);
+    }
+
+    // get calls, the order should be
+    // call 0 with p0
+    // call 2 with p0
+    // call 1 with p1
+    // call 3 with p1
+    assertEquals(manager.take().priorityLevel, 0);
+    assertEquals(manager.take().priorityLevel, 0);
+    assertEquals(manager.take().priorityLevel, 1);
+    assertEquals(manager.take().priorityLevel, 1);
+  }
+
   public static class ExceptionFakeCall implements Schedulable {
 public ExceptionFakeCall() {
   throw new IllegalArgumentException("Exception caused by call queue " +
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
index f478957..1fed9a3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
@@ -170,18 +170,13 @@ public class TestFairCallQueue {
 // default weights i.e. all queues share capacity
     fcq = new FairCallQueue<Schedulable>(numQueues, 4, "ns", conf);
     FairCallQueue<Schedulable> fcq1 = new FairCallQueue<Schedulable>(
-        numQueues, capacity, "ns", new int[]{3, 1}, conf);
+        numQueues, capacity, "ns", new int[]{1, 3}, conf);
 
 for (int i=0; i < capacity; i++) {
   Schedulable call = mockCall("u", i%2);
   calls.add(call);
   fcq.add(call);
   fcq1.add(call);
-
-  call = mockCall("u", (i++)%2);
-  calls.add(call);
-  fcq.add(call);
-  fcq1.add(call);
 }
 
 final AtomicInteger currentIndex = new AtomicInteger();
@@ -200,24 +195,24 @@ public 

[hadoop] branch branch-3.3 updated: HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.

2020-05-07 Thread ayushsaxena
This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
 new 8178141  HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
8178141 is described below

commit 817814105aba66c90a616c87854bbba749d74a17
Author: Ayush Saxena 
AuthorDate: Fri May 8 00:53:49 2020 +0530

HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
---
 .../src/test/resources/testHDFSConf.xml| 48 +++---
 1 file changed, 24 insertions(+), 24 deletions(-)
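
Why the change below is needed: the owner column of the expected ls output was matched with the token [a-zA-z0-9]*, a character class that contains neither '.' nor '-', so the comparator could never match output for a user such as yan.xiaole or yan-xiaole (the A-z range is itself a latent bug, since it also spans punctuation such as '_'). A standalone demonstration of the failure mode (the class name is hypothetical; it is assumed, not shown here, that the CLI test harness expands the USERNAME placeholder to the running user's name):

import java.util.regex.Pattern;

// Demonstrates that the old owner token cannot match dotted/dashed names.
public final class UsernameRegexDemo {
  public static void main(String[] args) {
    String oldToken = "[a-zA-z0-9]*";
    System.out.println(Pattern.matches(oldToken, "hadoop"));      // true
    System.out.println(Pattern.matches(oldToken, "yan.xiaole"));  // false: '.' not in the class
    System.out.println(Pattern.matches(oldToken, "yan-xiaole"));  // false: '-' not in the class
  }
}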

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index ea327c4..6142a1a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -1060,9 +1060,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
 
   
 
@@ -1081,9 +1081,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
 
   
 
@@ -1105,10 +1105,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
 
   
 
@@ -1130,10 +1130,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
 

[hadoop] branch branch-3.1 updated: HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.

2020-05-07 Thread ayushsaxena
This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
 new 8988f33  HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
8988f33 is described below

commit 8988f33ac34c81880a1f7cbbee018c14625a2186
Author: Ayush Saxena 
AuthorDate: Fri May 8 00:53:49 2020 +0530

HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
---
 .../src/test/resources/testHDFSConf.xml| 48 +++---
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index a13c441..a029c94 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -1060,9 +1060,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
 
   
 
@@ -1081,9 +1081,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
 
   
 
@@ -1105,10 +1105,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
 
   
 
@@ -1130,10 +1130,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
 

[hadoop] branch branch-3.2 updated: HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.

2020-05-07 Thread ayushsaxena
This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
 new c3fffa8  HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
c3fffa8 is described below

commit c3fffa8451df22202760dbd4842d0fa1423b4139
Author: Ayush Saxena 
AuthorDate: Fri May 8 00:53:49 2020 +0530

HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
---
 .../src/test/resources/testHDFSConf.xml| 48 +++---
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index 4ab093b..d729a24 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -1060,9 +1060,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
 
   
 
@@ -1081,9 +1081,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
 
   
 
@@ -1105,10 +1105,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
 
   
 
@@ -1130,10 +1130,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
 

[hadoop] branch trunk updated: HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.

2020-05-07 Thread ayushsaxena
This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new d59de27  HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
d59de27 is described below

commit d59de27c01d7f74a15471fb6021ecf9cd54c7025
Author: Ayush Saxena 
AuthorDate: Fri May 8 00:53:49 2020 +0530

HDFS-15339. TestHDFSCLI fails for user names with the dot/dash character. Contributed by Yan Xiaole.
---
 .../src/test/resources/testHDFSConf.xml| 48 +++---
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
index ea327c4..6142a1a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
@@ -1060,9 +1060,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
 
   
 
@@ -1081,9 +1081,9 @@
 
   RegexpAcrossOutputComparator
   Found 3 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file3
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file2
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*0( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/file1
 
   
 
@@ -1105,10 +1105,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
 
   
 
@@ -1130,10 +1130,10 @@
 
   RegexpAcrossOutputComparator
   Found 4 items
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
--rw-r--r--( )*1( )*[a-zA-z0-9]*( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*15( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data15bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*30( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data30bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*60( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data60bytes
+-rw-r--r--( )*1( )*USERNAME( )*supergroup( )*120( )*[0-9]{4,}-[0-9]{2,}-[0-9]{2,} [0-9]{2,}:[0-9]{2,}( )*/dir/data120bytes
 
   
 

[hadoop] branch branch-3.1 updated: YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak

2020-05-07 Thread epayne
This is an automated email from the ASF dual-hosted git repository.

epayne pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
 new 7e6c5e5  YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak
7e6c5e5 is described below

commit 7e6c5e5ad2768499d181af3a8b21874d064b9197
Author: Szilard Nemeth 
AuthorDate: Tue Nov 26 16:42:33 2019 +0100

YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak

(cherry picked from commit 52e9ee39a12ce91b3a545603dcf1103518ad2920)
---
 .../hadoop/yarn/util/resource/ResourceUtils.java   |  4 +-
 .../yarn/util/resource/TestResourceUtils.java  | 45 ++
 2 files changed, 48 insertions(+), 1 deletion(-)
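
To see what the one-line regex change accomplishes: the lookup previously required the config-key suffix after the prefix to contain no dots ([^.]+$), which rejected the dotted yarn.io/ namespace. Inserting the optional (yarn\.io/)? group in front of that tail admits yarn.io/gpu while still rejecting keys with stray dots. A standalone sketch of the composed lookup regex (the class name is hypothetical; the prefix is the one used by the new test):

import java.util.regex.Pattern;

// Composes the amended lookup regex and probes it with keys from the test.
public final class YarnIoPrefixDemo {
  private static final String YARN_IO_OPTIONAL = "(yarn\\.io/)?";

  public static void main(String[] args) {
    String prefix = "mapreduce.mapper.proper.rt.";
    String regex = "^" + Pattern.quote(prefix) + YARN_IO_OPTIONAL + "[^.]+$";
    System.out.println(Pattern.matches(regex, prefix + "yarn.io/gpu"));     // true
    System.out.println(Pattern.matches(regex, prefix + "regular_rt"));      // true
    System.out.println(Pattern.matches(regex, prefix + "too.many_parts"));  // false: extra '.'
    System.out.println(Pattern.matches(regex, prefix + "yarn.io/"));        // false: empty name
  }
}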

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
index 142dafa..ea21d5e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
@@ -66,6 +66,8 @@ public class ResourceUtils {
   "^(((\\p{Alnum}([\\p{Alnum}-]*\\p{Alnum})?\\.)*"
   + "\\p{Alnum}([\\p{Alnum}-]*\\p{Alnum})?)/)?\\p{Alpha}([\\w.-]*)$");
 
+  public static final String YARN_IO_OPTIONAL = "(yarn\\.io/)?";
+
   private static volatile boolean initializedResources = false;
   private static final Map<String, Integer> RESOURCE_NAME_TO_INDEX =
       new ConcurrentHashMap<String, Integer>();
@@ -657,7 +659,7 @@ public class ResourceUtils {
       Configuration configuration, String prefix) {
     List<ResourceInformation> result = new ArrayList<>();
     Map<String, String> customResourcesMap = configuration
-        .getValByRegex("^" + Pattern.quote(prefix) + "[^.]+$");
+        .getValByRegex("^" + Pattern.quote(prefix) + YARN_IO_OPTIONAL + "[^.]+$");
     for (Entry<String, String> resource : customResourcesMap.entrySet()) {
       String resourceName = resource.getKey().substring(prefix.length());
       Matcher matcher =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
index 2671de8..1eb7991 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
@@ -31,8 +31,12 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.io.File;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Test class to verify all resource utility methods.
@@ -153,6 +157,47 @@ public class TestResourceUtils {
   }
 
   @Test
+  public void testGetRequestedResourcesFromConfig() {
+    Configuration conf = new Configuration();
+
+    //these resource type configurations should be recognised
+    String propertyPrefix = "mapreduce.mapper.proper.rt.";
+    String[] expectedKeys = {
+        "yarn.io/gpu",
+        "yarn.io/fpga",
+        "yarn.io/anything_without_a_dot",
+        "regular_rt",
+        "regular_rt/with_slash"};
+
+    String[] invalidKeys = {
+        propertyPrefix + "too.many_parts",
+        propertyPrefix + "yarn.notio/gpu",
+        "incorrect.prefix.yarn.io/gpu",
+        propertyPrefix + "yarn.io/",
+        propertyPrefix};
+
+    for (String s : expectedKeys) {
+      //setting the properties which are expected to be in the resource list
+      conf.set(propertyPrefix + s, "42");
+    }
+
+    for (String s : invalidKeys) {
+      //setting the properties which are not expected to be in the resource list
+      conf.set(s, "24");
+    }
+
+    List<ResourceInformation> properList =
+        ResourceUtils.getRequestedResourcesFromConfig(conf, propertyPrefix);
+    Set<String> expectedSet =
+        new HashSet<>(Arrays.asList(expectedKeys));
+
+    Assert.assertEquals(properList.size(), expectedKeys.length);
+    properList.forEach(
+        item -> Assert.assertTrue(expectedSet.contains(item.getName())));
+
+  }
+
+  @Test
   public void testGetResourceTypesConfigErrors() throws Exception {
 Configuration conf = new YarnConfiguration();
 



[hadoop] branch branch-3.2 updated: YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak

2020-05-07 Thread epayne
This is an automated email from the ASF dual-hosted git repository.

epayne pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
 new d345994  YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak
d345994 is described below

commit d3459944686137212b78f6864da49446a2380ae8
Author: Szilard Nemeth 
AuthorDate: Tue Nov 26 16:42:33 2019 +0100

YARN-9444. YARN API ResourceUtils's getRequestedResourcesFromConfig doesn't recognize yarn.io/gpu as a valid resource. Contributed by Gergely Pollak

(cherry picked from commit 52e9ee39a12ce91b3a545603dcf1103518ad2920)
---
 .../hadoop/yarn/util/resource/ResourceUtils.java   |  4 +-
 .../yarn/util/resource/TestResourceUtils.java  | 45 ++
 2 files changed, 48 insertions(+), 1 deletion(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
index fd8be24..5061604 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/resource/ResourceUtils.java
@@ -67,6 +67,8 @@ public class ResourceUtils {
   "^(((\\p{Alnum}([\\p{Alnum}-]*\\p{Alnum})?\\.)*"
   + "\\p{Alnum}([\\p{Alnum}-]*\\p{Alnum})?)/)?\\p{Alpha}([\\w.-]*)$");
 
+  public static final String YARN_IO_OPTIONAL = "(yarn\\.io/)?";
+
   private static volatile boolean initializedResources = false;
   private static final Map<String, Integer> RESOURCE_NAME_TO_INDEX =
       new ConcurrentHashMap<String, Integer>();
@@ -664,7 +666,7 @@ public class ResourceUtils {
       Configuration configuration, String prefix) {
     List<ResourceInformation> result = new ArrayList<>();
     Map<String, String> customResourcesMap = configuration
-        .getValByRegex("^" + Pattern.quote(prefix) + "[^.]+$");
+        .getValByRegex("^" + Pattern.quote(prefix) + YARN_IO_OPTIONAL + "[^.]+$");
     for (Entry<String, String> resource : customResourcesMap.entrySet()) {
       String resourceName = resource.getKey().substring(prefix.length());
       Matcher matcher =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
index 95cf83e..5a6ebf9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/resource/TestResourceUtils.java
@@ -37,8 +37,12 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Test class to verify all resource utility methods.
@@ -204,6 +208,47 @@ public class TestResourceUtils {
   }
 
   @Test
+  public void testGetRequestedResourcesFromConfig() {
+    Configuration conf = new Configuration();
+
+    //these resource type configurations should be recognised
+    String propertyPrefix = "mapreduce.mapper.proper.rt.";
+    String[] expectedKeys = {
+        "yarn.io/gpu",
+        "yarn.io/fpga",
+        "yarn.io/anything_without_a_dot",
+        "regular_rt",
+        "regular_rt/with_slash"};
+
+    String[] invalidKeys = {
+        propertyPrefix + "too.many_parts",
+        propertyPrefix + "yarn.notio/gpu",
+        "incorrect.prefix.yarn.io/gpu",
+        propertyPrefix + "yarn.io/",
+        propertyPrefix};
+
+    for (String s : expectedKeys) {
+      //setting the properties which are expected to be in the resource list
+      conf.set(propertyPrefix + s, "42");
+    }
+
+    for (String s : invalidKeys) {
+      //setting the properties which are not expected to be in the resource list
+      conf.set(s, "24");
+    }
+
+    List<ResourceInformation> properList =
+        ResourceUtils.getRequestedResourcesFromConfig(conf, propertyPrefix);
+    Set<String> expectedSet =
+        new HashSet<>(Arrays.asList(expectedKeys));
+
+    Assert.assertEquals(properList.size(), expectedKeys.length);
+    properList.forEach(
+        item -> Assert.assertTrue(expectedSet.contains(item.getName())));
+
+  }
+
+  @Test
   public void testGetResourceTypesConfigErrors() throws IOException {
 Configuration conf = new YarnConfiguration();
 



[hadoop] branch trunk updated: HADOOP-17018. Intermittent failing of ITestAbfsStreamStatistics in ABFS (#1990)

2020-05-07 Thread stevel
This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 192cad9  HADOOP-17018. Intermittent failing of ITestAbfsStreamStatistics in ABFS (#1990)
192cad9 is described below

commit 192cad9ee24779cbd7735fdf9da0fba90255d546
Author: Mehakmeet Singh 
AuthorDate: Thu May 7 16:45:28 2020 +0530

HADOOP-17018. Intermittent failing of ITestAbfsStreamStatistics in ABFS (#1990)


Contributed by: Mehakmeet Singh

In some cases the ABFS prefetch thread runs in the background and returns some bytes from the buffer, which records an extra readOp. This makes the readOps value arbitrary and caused intermittent failures; hence, readOps values of either 2 or 3 are seen in different setups.
---
 .../hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java | 19 ++-
 1 file changed, 14 insertions(+), 5 deletions(-)
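
The fix below swaps an exact-count assertion for one that accepts either observed readOps value. A hypothetical helper that generalizes the same pattern to any small set of allowed values (not part of the patch; the name is invented, and JUnit 4 on the classpath is assumed):

import java.util.Arrays;

import static org.junit.Assert.assertTrue;

// Asserts that a metric perturbed by background work (e.g. the ABFS
// prefetch thread) equals one of a small set of acceptable values.
public final class FlakyMetricAssert {
  static void assertValueIn(long actual, long... allowed) {
    assertTrue(String.format("The actual value of %d was not among the "
        + "expected values %s", actual, Arrays.toString(allowed)),
        Arrays.stream(allowed).anyMatch(v -> v == actual));
  }
}

// Usage, mirroring the patched test:
//   assertValueIn(statistics.getReadOps(), 2, 3);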

diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java
index b749f49..51531f6 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsStreamStatistics.java
@@ -84,12 +84,21 @@ public class ITestAbfsStreamStatistics extends AbstractAbfsIntegrationTest {
 
   LOG.info("Result of Read operation : {}", result);
   /*
-  Testing if 2 read_ops value is coming after reading full content from a
-  file (3 if anything to read from Buffer too).
-  Reason: read() call gives read_ops=1,
-  reading from AbfsClient(http GET) gives read_ops=2.
+   * Testing if 2 read_ops value is coming after reading full content
+   * from a file (3 if anything to read from Buffer too). Reason: read()
+   * call gives read_ops=1, reading from AbfsClient(http GET) gives
+   * read_ops=2.
+   *
+   * In some cases ABFS-prefetch thread runs in the background which
+   * returns some bytes from buffer and gives an extra readOp.
+   * Thus, making readOps values arbitrary and giving intermittent
+   * failures in some cases. Hence, readOps values of 2 or 3 is seen in
+   * different setups.
+   *
*/
-  assertReadWriteOps("read", 2, statistics.getReadOps());
+  assertTrue(String.format("The actual value of %d was not equal to the "
+  + "expected value of 2 or 3", statistics.getReadOps()),
+  statistics.getReadOps() == 2 || statistics.getReadOps() == 3);
 
 } finally {
   IOUtils.cleanupWithLogger(LOG, inForOneOperation,

