Co-authored-by: Ashutosh Gupta <ashugpt@amazon.com>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
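
Migrates the hadoop-yarn-common tests from JUnit 4 to JUnit 5: the
junit-jupiter-api, junit-jupiter-engine, junit-jupiter-params, and
junit-platform-launcher test dependencies are added, org.junit.Assert
calls are replaced with statically imported org.junit.jupiter.api
Assertions, @BeforeClass becomes @BeforeAll, @Ignore becomes @Disabled,
and test methods drop the now-unnecessary public modifier. The main
signature change to note: JUnit 4 assertions take the failure message as
the first argument, JUnit 5 takes it as the last. A representative
before/after pair, taken from the TestContainerLaunchRPC hunk below:

    // JUnit 4: failure message is the first argument
    Assert.assertEquals("Error, exception is not: "
        + SocketTimeoutException.class.getName(),
        SocketTimeoutException.class.getName(), e.getClass().getName());

    // JUnit 5: failure message moves to the last argument
    assertEquals(SocketTimeoutException.class.getName(), e.getClass().getName(),
        "Error, exception is not: " + SocketTimeoutException.class.getName());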
@@ -138,6 +138,26 @@
<artifactId>bcprov-jdk15on</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
@@ -20,11 +20,11 @@ package org.apache.hadoop.yarn;
import java.util.Iterator;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
+
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators;
-
/**
* Utilities to generate fake test apps
*/
@@ -24,8 +24,10 @@ import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.List;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.NetUtils;
@@ -35,12 +37,12 @@ import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.CommitResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ContainerUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ContainerUpdateResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetLocalizationStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetLocalizationStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceRequest;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceLocalizationRequest;
@@ -70,8 +72,9 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
-import org.junit.Assert;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
/*
* Test that the container launcher rpc times out properly. This is used
@@ -86,7 +89,7 @@ public class TestContainerLaunchRPC {
.getRecordFactory(null);
@Test
- public void testHadoopProtoRPCTimeout() throws Exception {
+ void testHadoopProtoRPCTimeout() throws Exception {
testRPCTimeout(HadoopYarnProtoRPC.class.getName());
}
@@ -136,16 +139,15 @@ public class TestContainerLaunchRPC {
proxy.startContainers(allRequests);
} catch (Exception e) {
LOG.info(StringUtils.stringifyException(e));
- Assert.assertEquals("Error, exception is not: "
- + SocketTimeoutException.class.getName(),
- SocketTimeoutException.class.getName(), e.getClass().getName());
+ assertEquals(SocketTimeoutException.class.getName(), e.getClass().getName(),
+ "Error, exception is not: " + SocketTimeoutException.class.getName());
return;
    } finally {
      server.stop();
    }
-    Assert.fail("timeout exception should have occurred!");
+    fail("timeout exception should have occurred!");
public static Token newContainerToken(NodeId nodeId, byte[] password,
@@ -18,14 +18,15 @@
package org.apache.hadoop.yarn;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
public class TestContainerLogAppender {
- public void testAppendInClose() throws Exception {
+ void testAppendInClose() throws Exception {
final ContainerLogAppender claAppender = new ContainerLogAppender();
claAppender.setName("testCLA");
claAppender.setLayout(new PatternLayout("%-5p [%t]: %m%n"));
@@ -18,8 +18,16 @@
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.SocketTimeoutException;
+import java.util.ArrayList;
+import java.util.List;
@@ -59,14 +67,9 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.SocketTimeoutException;
-import java.util.ArrayList;
-import java.util.List;
* Test that the container resource increase rpc times out properly.
@@ -78,7 +81,7 @@ public class TestContainerResourceIncreaseRPC {
TestContainerResourceIncreaseRPC.class);
@@ -122,15 +125,14 @@ public class TestContainerResourceIncreaseRPC {
proxy.updateContainer(request);
@@ -157,11 +159,9 @@ public class TestContainerResourceIncreaseRPC {
@Override
- public StopContainersResponse
- stopContainers(StopContainersRequest requests) throws YarnException,
- IOException {
- Exception e = new Exception("Dummy function", new Exception(
- "Dummy function cause"));
+ public StopContainersResponse stopContainers(StopContainersRequest requests)
+ throws YarnException, IOException {
+ Exception e = new Exception("Dummy function", new Exception("Dummy function cause"));
throw new YarnException(e);
@@ -21,7 +21,7 @@ package org.apache.hadoop.yarn;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -37,16 +37,16 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl;
import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl;
public class TestRPCFactories {
- public void test() {
+ void test() {
testPbServerFactory();
testPbClientFactory();
@@ -64,7 +64,7 @@ public class TestRPCFactories {
server.start();
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to create server");
+ fail("Failed to create server");
if (server != null) {
@@ -92,12 +92,12 @@ public class TestRPCFactories {
amrmClient = (ApplicationMasterProtocol) RpcClientFactoryPBImpl.get().getClient(ApplicationMasterProtocol.class, 1, NetUtils.getConnectAddress(server), conf);
- Assert.fail("Failed to create client");
+ fail("Failed to create client");
@@ -18,39 +18,41 @@
-import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateResponsePBImpl;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl;
public class TestRecordFactory {
- public void testPbRecordFactory() {
+ void testPbRecordFactory() {
RecordFactory pbRecordFactory = RecordFactoryPBImpl.get();
try {
AllocateResponse response =
pbRecordFactory.newRecordInstance(AllocateResponse.class);
- Assert.assertEquals(AllocateResponsePBImpl.class, response.getClass());
+ assertEquals(AllocateResponsePBImpl.class, response.getClass());
- Assert.fail("Failed to crete record");
+ fail("Failed to crete record");
AllocateRequest response =
pbRecordFactory.newRecordInstance(AllocateRequest.class);
- Assert.assertEquals(AllocateRequestPBImpl.class, response.getClass());
+ assertEquals(AllocateRequestPBImpl.class, response.getClass());
@@ -18,7 +18,7 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -28,21 +28,23 @@ import org.apache.hadoop.yarn.factories.RpcServerFactory;
import org.apache.hadoop.yarn.factory.providers.RpcFactoryProvider;
public class TestRpcFactoryProvider {
- public void testFactoryProvider() {
+ void testFactoryProvider() {
Configuration conf = new Configuration();
RpcClientFactory clientFactory = null;
RpcServerFactory serverFactory = null;
clientFactory = RpcFactoryProvider.getClientFactory(conf);
serverFactory = RpcFactoryProvider.getServerFactory(conf);
- Assert.assertEquals(RpcClientFactoryPBImpl.class, clientFactory.getClass());
- Assert.assertEquals(RpcServerFactoryPBImpl.class, serverFactory.getClass());
+ assertEquals(RpcClientFactoryPBImpl.class, clientFactory.getClass());
+ assertEquals(RpcServerFactoryPBImpl.class, serverFactory.getClass());
conf.set(YarnConfiguration.IPC_CLIENT_FACTORY_CLASS, "unknown");
conf.set(YarnConfiguration.IPC_SERVER_FACTORY_CLASS, "unknown");
@@ -50,28 +52,30 @@ public class TestRpcFactoryProvider {
- Assert.fail("Expected an exception - unknown serializer");
+ fail("Expected an exception - unknown serializer");
conf = new Configuration();
conf.set(YarnConfiguration.IPC_CLIENT_FACTORY_CLASS, "NonExistantClass");
conf.set(YarnConfiguration.IPC_SERVER_FACTORY_CLASS, RpcServerFactoryPBImpl.class.getName());
- Assert.fail("Expected an exception - unknown class");
+ fail("Expected an exception - unknown class");
- Assert.fail("Error while loading factory using reflection: [" + RpcServerFactoryPBImpl.class.getName() + "]");
+ fail(
+ "Error while loading factory using reflection: [" + RpcServerFactoryPBImpl.class.getName()
+ + "]");
@@ -18,18 +18,20 @@
-import static org.junit.Assert.assertSame;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
import org.apache.hadoop.util.ExitUtil;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
public class TestYarnUncaughtExceptionHandler {
private static final YarnUncaughtExceptionHandler exHandler =
new YarnUncaughtExceptionHandler();
* Throw {@code YarnRuntimeException} inside thread and
* check {@code YarnUncaughtExceptionHandler} instance
@@ -37,7 +39,7 @@ public class TestYarnUncaughtExceptionHandler {
* @throws InterruptedException
- public void testUncaughtExceptionHandlerWithRuntimeException()
+ void testUncaughtExceptionHandlerWithRuntimeException()
throws InterruptedException {
final YarnUncaughtExceptionHandler spyYarnHandler = spy(exHandler);
final YarnRuntimeException yarnException = new YarnRuntimeException(
@@ -67,7 +69,7 @@ public class TestYarnUncaughtExceptionHandler {
- public void testUncaughtExceptionHandlerWithError()
+ void testUncaughtExceptionHandlerWithError()
ExitUtil.disableSystemExit();
final YarnUncaughtExceptionHandler spyErrorHandler = spy(exHandler);
@@ -96,7 +98,7 @@ public class TestYarnUncaughtExceptionHandler {
- public void testUncaughtExceptionHandlerWithOutOfMemoryError()
+ void testUncaughtExceptionHandlerWithOutOfMemoryError()
ExitUtil.disableSystemHalt();
final YarnUncaughtExceptionHandler spyOomHandler = spy(exHandler);
@@ -17,6 +17,25 @@
package org.apache.hadoop.yarn.api;
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.Lists;
@@ -24,20 +43,12 @@ import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
import org.apache.hadoop.yarn.api.resource.PlacementConstraints;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.lang.reflect.*;
-import java.nio.ByteBuffer;
-import java.util.*;
import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.NODE;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints
- .PlacementTargets.allocationTag;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.PlacementTargets.allocationTag;
import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetIn;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
* Generic helper class to validate protocol records.
@@ -223,7 +234,7 @@ public class BasePBImplRecordsTest {
p.getMethod = m;
ret.put(propertyName, p);
} else {
- Assert.fail("Multiple get method with same name: " + recordClass
+ fail("Multiple get method with same name: " + recordClass
+ p.propertyName);
@@ -284,18 +295,17 @@ public class BasePBImplRecordsTest {
gsp.setMethod.invoke(origRecord, gsp.testValue);
Object ret = getProto.invoke(origRecord);
- Assert.assertNotNull(recordClass.getName() + "#getProto returns null", ret);
+ assertNotNull(ret, recordClass.getName() + "#getProto returns null");
if (!(protoClass.isAssignableFrom(ret.getClass()))) {
- Assert.fail("Illegal getProto method return type: " + ret.getClass());
+ fail("Illegal getProto method return type: " + ret.getClass());
R deserRecord = pbConstructor.newInstance(ret);
- Assert.assertEquals("whole " + recordClass + " records should be equal",
- origRecord, deserRecord);
+ assertEquals(origRecord, deserRecord, "whole " + recordClass + " records should be equal");
for (GetSetPair gsp : getSetPairs.values()) {
Object origValue = gsp.getMethod.invoke(origRecord);
Object deserValue = gsp.getMethod.invoke(deserRecord);
- Assert.assertEquals("property " + recordClass.getName() + "#"
- + gsp.propertyName + " should be equal", origValue, deserValue);
+ assertEquals(origValue, deserValue, "property " + recordClass.getName() + "#"
+ + gsp.propertyName + " should be equal");
@@ -19,41 +19,45 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestApplicationAttemptId {
- public void testApplicationAttemptId() {
+ void testApplicationAttemptId() {
ApplicationAttemptId a1 = createAppAttemptId(10l, 1, 1);
ApplicationAttemptId a2 = createAppAttemptId(10l, 1, 2);
ApplicationAttemptId a3 = createAppAttemptId(10l, 2, 1);
ApplicationAttemptId a4 = createAppAttemptId(8l, 1, 4);
ApplicationAttemptId a5 = createAppAttemptId(10l, 1, 1);
- Assert.assertTrue(a1.equals(a5));
- Assert.assertFalse(a1.equals(a2));
- Assert.assertFalse(a1.equals(a3));
- Assert.assertFalse(a1.equals(a4));
- Assert.assertTrue(a1.compareTo(a5) == 0);
- Assert.assertTrue(a1.compareTo(a2) < 0);
- Assert.assertTrue(a1.compareTo(a3) < 0);
- Assert.assertTrue(a1.compareTo(a4) > 0);
- Assert.assertTrue(a1.hashCode() == a5.hashCode());
- Assert.assertFalse(a1.hashCode() == a2.hashCode());
- Assert.assertFalse(a1.hashCode() == a3.hashCode());
- Assert.assertFalse(a1.hashCode() == a4.hashCode());
+ assertEquals(a1, a5);
+ assertNotEquals(a1, a2);
+ assertNotEquals(a1, a3);
+ assertNotEquals(a1, a4);
+ assertTrue(a1.compareTo(a5) == 0);
+ assertTrue(a1.compareTo(a2) < 0);
+ assertTrue(a1.compareTo(a3) < 0);
+ assertTrue(a1.compareTo(a4) > 0);
+ assertTrue(a1.hashCode() == a5.hashCode());
+ assertFalse(a1.hashCode() == a2.hashCode());
+ assertFalse(a1.hashCode() == a3.hashCode());
+ assertFalse(a1.hashCode() == a4.hashCode());
long ts = System.currentTimeMillis();
ApplicationAttemptId a6 = createAppAttemptId(ts, 543627, 33492611);
- Assert.assertEquals("appattempt_10_0001_000001", a1.toString());
- Assert.assertEquals("appattempt_" + ts + "_543627_33492611", a6.toString());
+ assertEquals("appattempt_10_0001_000001", a1.toString());
+ assertEquals("appattempt_" + ts + "_543627_33492611", a6.toString());
private ApplicationAttemptId createAppAttemptId(
@@ -18,36 +18,40 @@
public class TestApplicationId {
- public void testApplicationId() {
+ void testApplicationId() {
ApplicationId a1 = ApplicationId.newInstance(10l, 1);
ApplicationId a2 = ApplicationId.newInstance(10l, 2);
ApplicationId a3 = ApplicationId.newInstance(10l, 1);
ApplicationId a4 = ApplicationId.newInstance(8l, 3);
-    Assert.assertTrue(a1.equals(a3));
-    Assert.assertTrue(a1.compareTo(a3) == 0);
-    Assert.assertTrue(a1.hashCode() == a3.hashCode());
-    Assert.assertFalse(a2.hashCode() == a4.hashCode());
+    assertEquals(a1, a3);
+    assertTrue(a1.compareTo(a3) == 0);
+    assertTrue(a1.hashCode() == a3.hashCode());
+    assertFalse(a2.hashCode() == a4.hashCode());
ApplicationId a5 = ApplicationId.newInstance(ts, 45436343);
- Assert.assertEquals("application_10_0001", a1.toString());
- Assert.assertEquals("application_" + ts + "_45436343", a5.toString());
+ assertEquals("application_10_0001", a1.toString());
+ assertEquals("application_" + ts + "_45436343", a5.toString());
@@ -18,6 +18,8 @@
import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -25,13 +27,15 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Priority;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertNull;
public class TestApplicatonReport {
- public void testApplicationReport() {
+ void testApplicationReport() {
long timestamp = System.currentTimeMillis();
ApplicationReport appReport1 =
createApplicationReport(1, 1, timestamp);
@@ -39,15 +43,15 @@ public class TestApplicatonReport {
ApplicationReport appReport3 =
- Assert.assertEquals(appReport1, appReport2);
- Assert.assertEquals(appReport2, appReport3);
+ assertEquals(appReport1, appReport2);
+ assertEquals(appReport2, appReport3);
appReport1.setApplicationId(null);
- Assert.assertNull(appReport1.getApplicationId());
- Assert.assertNotSame(appReport1, appReport2);
+ assertNull(appReport1.getApplicationId());
+ assertNotSame(appReport1, appReport2);
appReport2.setCurrentApplicationAttemptId(null);
- Assert.assertNull(appReport2.getCurrentApplicationAttemptId());
- Assert.assertNotSame(appReport2, appReport3);
- Assert.assertNull(appReport1.getAMRMToken());
+ assertNull(appReport2.getCurrentApplicationAttemptId());
+ assertNotSame(appReport2, appReport3);
+ assertNull(appReport1.getAMRMToken());
protected static ApplicationReport createApplicationReport(
@@ -19,57 +19,61 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
public class TestContainerId {
- public void testContainerId() {
+ void testContainerId() {
ContainerId c1 = newContainerId(1, 1, 10l, 1);
ContainerId c2 = newContainerId(1, 1, 10l, 2);
ContainerId c3 = newContainerId(1, 1, 10l, 1);
ContainerId c4 = newContainerId(1, 3, 10l, 1);
ContainerId c5 = newContainerId(1, 3, 8l, 1);
-    Assert.assertTrue(c1.equals(c3));
-    Assert.assertFalse(c1.equals(c2));
-    Assert.assertFalse(c1.equals(c4));
-    Assert.assertFalse(c1.equals(c5));
-    Assert.assertTrue(c1.compareTo(c3) == 0);
-    Assert.assertTrue(c1.compareTo(c2) < 0);
-    Assert.assertTrue(c1.compareTo(c4) < 0);
-    Assert.assertTrue(c1.compareTo(c5) > 0);
-    Assert.assertTrue(c1.hashCode() == c3.hashCode());
-    Assert.assertFalse(c1.hashCode() == c2.hashCode());
-    Assert.assertFalse(c1.hashCode() == c4.hashCode());
-    Assert.assertFalse(c1.hashCode() == c5.hashCode());
+    assertEquals(c1, c3);
+    assertNotEquals(c1, c2);
+    assertNotEquals(c1, c4);
+    assertNotEquals(c1, c5);
+    assertTrue(c1.compareTo(c3) == 0);
+    assertTrue(c1.compareTo(c2) < 0);
+    assertTrue(c1.compareTo(c4) < 0);
+    assertTrue(c1.compareTo(c5) > 0);
+    assertTrue(c1.hashCode() == c3.hashCode());
+    assertFalse(c1.hashCode() == c2.hashCode());
+    assertFalse(c1.hashCode() == c4.hashCode());
+    assertFalse(c1.hashCode() == c5.hashCode());
ContainerId c6 = newContainerId(36473, 4365472, ts, 25645811);
- Assert.assertEquals("container_10_0001_01_000001", c1.toString());
- Assert.assertEquals(25645811, 0xffffffffffL & c6.getContainerId());
- Assert.assertEquals(0, c6.getContainerId() >> 40);
- Assert.assertEquals("container_" + ts + "_36473_4365472_25645811",
+ assertEquals("container_10_0001_01_000001", c1.toString());
+ assertEquals(25645811, 0xffffffffffL & c6.getContainerId());
+ assertEquals(0, c6.getContainerId() >> 40);
+ assertEquals("container_" + ts + "_36473_4365472_25645811",
c6.toString());
ContainerId c7 = newContainerId(36473, 4365472, ts, 4298334883325L);
- Assert.assertEquals(999799999997L, 0xffffffffffL & c7.getContainerId());
- Assert.assertEquals(3, c7.getContainerId() >> 40);
- Assert.assertEquals(
+ assertEquals(999799999997L, 0xffffffffffL & c7.getContainerId());
+ assertEquals(3, c7.getContainerId() >> 40);
+ assertEquals(
"container_e03_" + ts + "_36473_4365472_999799999997",
c7.toString());
ContainerId c8 = newContainerId(36473, 4365472, ts, 844424930131965L);
- Assert.assertEquals(1099511627773L, 0xffffffffffL & c8.getContainerId());
- Assert.assertEquals(767, c8.getContainerId() >> 40);
+ assertEquals(1099511627773L, 0xffffffffffL & c8.getContainerId());
+ assertEquals(767, c8.getContainerId() >> 40);
"container_e767_" + ts + "_36473_4365472_1099511627773",
c8.toString());
@@ -21,93 +21,86 @@ import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl;
public class TestGetApplicationsRequest {
- public void testGetApplicationsRequest(){
+ void testGetApplicationsRequest() {
GetApplicationsRequest request = GetApplicationsRequest.newInstance();
- EnumSet<YarnApplicationState> appStates =
- EnumSet.of(YarnApplicationState.ACCEPTED);
+ EnumSet<YarnApplicationState> appStates =
+ EnumSet.of(YarnApplicationState.ACCEPTED);
request.setApplicationStates(appStates);
Set<String> tags = new HashSet<String>();
tags.add("tag1");
request.setApplicationTags(tags);
Set<String> types = new HashSet<String>();
types.add("type1");
request.setApplicationTypes(types);
long startBegin = System.currentTimeMillis();
long startEnd = System.currentTimeMillis() + 1;
request.setStartRange(startBegin, startEnd);
long finishBegin = System.currentTimeMillis() + 2;
long finishEnd = System.currentTimeMillis() + 3;
request.setFinishRange(finishBegin, finishEnd);
long limit = 100L;
request.setLimit(limit);
Set<String> queues = new HashSet<String>();
queues.add("queue1");
request.setQueues(queues);
Set<String> users = new HashSet<String>();
users.add("user1");
request.setUsers(users);
ApplicationsRequestScope scope = ApplicationsRequestScope.ALL;
request.setScope(scope);
GetApplicationsRequest requestFromProto = new GetApplicationsRequestPBImpl(
- ((GetApplicationsRequestPBImpl)request).getProto());
+ ((GetApplicationsRequestPBImpl) request).getProto());
// verify the whole record equals with original record
- Assert.assertEquals(requestFromProto, request);
+ assertEquals(requestFromProto, request);
// verify all properties are the same as original request
- "ApplicationStates from proto is not the same with original request",
- requestFromProto.getApplicationStates(), appStates);
- "ApplicationTags from proto is not the same with original request",
- requestFromProto.getApplicationTags(), tags);
- "ApplicationTypes from proto is not the same with original request",
- requestFromProto.getApplicationTypes(), types);
- "StartRange from proto is not the same with original request",
- requestFromProto.getStartRange(), Range.between(startBegin, startEnd));
- "FinishRange from proto is not the same with original request",
- requestFromProto.getFinishRange(),
- Range.between(finishBegin, finishEnd));
- "Limit from proto is not the same with original request",
- requestFromProto.getLimit(), limit);
- "Queues from proto is not the same with original request",
- requestFromProto.getQueues(), queues);
- "Users from proto is not the same with original request",
- requestFromProto.getUsers(), users);
+ assertEquals(requestFromProto.getApplicationStates(), appStates,
+ "ApplicationStates from proto is not the same with original request");
+ assertEquals(requestFromProto.getApplicationTags(), tags,
+ "ApplicationTags from proto is not the same with original request");
+ assertEquals(requestFromProto.getApplicationTypes(), types,
+ "ApplicationTypes from proto is not the same with original request");
+ assertEquals(requestFromProto.getStartRange(), Range.between(startBegin, startEnd),
+ "StartRange from proto is not the same with original request");
+ assertEquals(requestFromProto.getFinishRange(), Range.between(finishBegin, finishEnd),
+ "FinishRange from proto is not the same with original request");
+ assertEquals(requestFromProto.getLimit(), limit,
+ "Limit from proto is not the same with original request");
+ assertEquals(requestFromProto.getQueues(), queues,
+ "Queues from proto is not the same with original request");
+ assertEquals(requestFromProto.getUsers(), users,
+ "Users from proto is not the same with original request");
@@ -18,32 +18,36 @@
import org.apache.hadoop.yarn.api.records.NodeId;
public class TestNodeId {
- public void testNodeId() {
+ void testNodeId() {
NodeId nodeId1 = NodeId.newInstance("10.18.52.124", 8041);
NodeId nodeId2 = NodeId.newInstance("10.18.52.125", 8038);
NodeId nodeId3 = NodeId.newInstance("10.18.52.124", 8041);
NodeId nodeId4 = NodeId.newInstance("10.18.52.124", 8039);
- Assert.assertTrue(nodeId1.equals(nodeId3));
- Assert.assertFalse(nodeId1.equals(nodeId2));
- Assert.assertFalse(nodeId3.equals(nodeId4));
+ assertEquals(nodeId1, nodeId3);
+ assertNotEquals(nodeId1, nodeId2);
+ assertNotEquals(nodeId3, nodeId4);
- Assert.assertTrue(nodeId1.compareTo(nodeId3) == 0);
- Assert.assertTrue(nodeId1.compareTo(nodeId2) < 0);
- Assert.assertTrue(nodeId3.compareTo(nodeId4) > 0);
+ assertTrue(nodeId1.compareTo(nodeId3) == 0);
+ assertTrue(nodeId1.compareTo(nodeId2) < 0);
+ assertTrue(nodeId3.compareTo(nodeId4) > 0);
- Assert.assertTrue(nodeId1.hashCode() == nodeId3.hashCode());
- Assert.assertFalse(nodeId1.hashCode() == nodeId2.hashCode());
- Assert.assertFalse(nodeId3.hashCode() == nodeId4.hashCode());
+ assertTrue(nodeId1.hashCode() == nodeId3.hashCode());
+ assertFalse(nodeId1.hashCode() == nodeId2.hashCode());
+ assertFalse(nodeId3.hashCode() == nodeId4.hashCode());
- Assert.assertEquals("10.18.52.124:8041", nodeId1.toString());
+ assertEquals("10.18.52.124:8041", nodeId1.toString());
@@ -16,9 +16,15 @@
* limitations under the License.
import java.util.Arrays;
+import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
@@ -133,8 +139,8 @@ import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
-import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
+import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
@@ -189,8 +195,8 @@ import org.apache.hadoop.yarn.api.records.impl.pb.EnhancedHeadroomPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ExecutionTypeRequestPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NMTokenPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeKeyPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeInfoPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeKeyPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl;
@@ -201,8 +207,8 @@ import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContractPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionMessagePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionResourceRequestPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.QueueConfigurationsPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.QueueUserACLInfoPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourceBlacklistRequestPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourceOptionPBImpl;
@@ -232,8 +238,8 @@ import org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
-import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto;
@@ -245,8 +251,8 @@ import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto;
-import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto;
@@ -374,19 +380,15 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOn
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
import org.apache.hadoop.yarn.util.resource.Resources;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
* Test class for YARN API protocol records.
public class TestPBImplRecords extends BasePBImplRecordsTest {
- @BeforeClass
+ @BeforeAll
public static void setup() throws Exception {
typeValueCache.put(Range.class, Range.between(1000L, 2000L));
typeValueCache.put(URL.class, URL.newInstance(
@@ -474,326 +476,326 @@ public class TestPBImplRecords extends BasePBImplRecordsTest {
- public void testAllocateRequestPBImpl() throws Exception {
+ void testAllocateRequestPBImpl() throws Exception {
validatePBImplRecord(AllocateRequestPBImpl.class, AllocateRequestProto.class);
- public void testAllocateResponsePBImpl() throws Exception {
+ void testAllocateResponsePBImpl() throws Exception {
validatePBImplRecord(AllocateResponsePBImpl.class, AllocateResponseProto.class);
- public void testCancelDelegationTokenRequestPBImpl() throws Exception {
+ void testCancelDelegationTokenRequestPBImpl() throws Exception {
validatePBImplRecord(CancelDelegationTokenRequestPBImpl.class,
CancelDelegationTokenRequestProto.class);
- public void testCancelDelegationTokenResponsePBImpl() throws Exception {
+ void testCancelDelegationTokenResponsePBImpl() throws Exception {
validatePBImplRecord(CancelDelegationTokenResponsePBImpl.class,
CancelDelegationTokenResponseProto.class);
- public void testFinishApplicationMasterRequestPBImpl() throws Exception {
+ void testFinishApplicationMasterRequestPBImpl() throws Exception {
validatePBImplRecord(FinishApplicationMasterRequestPBImpl.class,
FinishApplicationMasterRequestProto.class);
- public void testFinishApplicationMasterResponsePBImpl() throws Exception {
+ void testFinishApplicationMasterResponsePBImpl() throws Exception {
validatePBImplRecord(FinishApplicationMasterResponsePBImpl.class,
FinishApplicationMasterResponseProto.class);
- public void testGetApplicationAttemptReportRequestPBImpl() throws Exception {
+ void testGetApplicationAttemptReportRequestPBImpl() throws Exception {
validatePBImplRecord(GetApplicationAttemptReportRequestPBImpl.class,
GetApplicationAttemptReportRequestProto.class);
- public void testGetApplicationAttemptReportResponsePBImpl() throws Exception {
+ void testGetApplicationAttemptReportResponsePBImpl() throws Exception {
validatePBImplRecord(GetApplicationAttemptReportResponsePBImpl.class,
GetApplicationAttemptReportResponseProto.class);
- public void testGetApplicationAttemptsRequestPBImpl() throws Exception {
+ void testGetApplicationAttemptsRequestPBImpl() throws Exception {
validatePBImplRecord(GetApplicationAttemptsRequestPBImpl.class,
GetApplicationAttemptsRequestProto.class);
- public void testGetApplicationAttemptsResponsePBImpl() throws Exception {
+ void testGetApplicationAttemptsResponsePBImpl() throws Exception {
validatePBImplRecord(GetApplicationAttemptsResponsePBImpl.class,
GetApplicationAttemptsResponseProto.class);
- public void testGetApplicationReportRequestPBImpl() throws Exception {
+ void testGetApplicationReportRequestPBImpl() throws Exception {
validatePBImplRecord(GetApplicationReportRequestPBImpl.class,
GetApplicationReportRequestProto.class);
- public void testGetApplicationReportResponsePBImpl() throws Exception {
+ void testGetApplicationReportResponsePBImpl() throws Exception {
validatePBImplRecord(GetApplicationReportResponsePBImpl.class,
GetApplicationReportResponseProto.class);
- public void testGetApplicationsRequestPBImpl() throws Exception {
+ void testGetApplicationsRequestPBImpl() throws Exception {
validatePBImplRecord(GetApplicationsRequestPBImpl.class,
GetApplicationsRequestProto.class);
- public void testGetApplicationsResponsePBImpl() throws Exception {
+ void testGetApplicationsResponsePBImpl() throws Exception {
validatePBImplRecord(GetApplicationsResponsePBImpl.class,
GetApplicationsResponseProto.class);
- public void testGetClusterMetricsRequestPBImpl() throws Exception {
+ void testGetClusterMetricsRequestPBImpl() throws Exception {
validatePBImplRecord(GetClusterMetricsRequestPBImpl.class,
GetClusterMetricsRequestProto.class);
- public void testGetClusterMetricsResponsePBImpl() throws Exception {
+ void testGetClusterMetricsResponsePBImpl() throws Exception {
validatePBImplRecord(GetClusterMetricsResponsePBImpl.class,
GetClusterMetricsResponseProto.class);
- public void testGetClusterNodesRequestPBImpl() throws Exception {
+ void testGetClusterNodesRequestPBImpl() throws Exception {
validatePBImplRecord(GetClusterNodesRequestPBImpl.class,
GetClusterNodesRequestProto.class);
- public void testGetClusterNodesResponsePBImpl() throws Exception {
+ void testGetClusterNodesResponsePBImpl() throws Exception {
validatePBImplRecord(GetClusterNodesResponsePBImpl.class,
GetClusterNodesResponseProto.class);
- public void testGetContainerReportRequestPBImpl() throws Exception {
+ void testGetContainerReportRequestPBImpl() throws Exception {
validatePBImplRecord(GetContainerReportRequestPBImpl.class,
GetContainerReportRequestProto.class);
- public void testGetContainerReportResponsePBImpl() throws Exception {
+ void testGetContainerReportResponsePBImpl() throws Exception {
validatePBImplRecord(GetContainerReportResponsePBImpl.class,
GetContainerReportResponseProto.class);
- public void testGetContainersRequestPBImpl() throws Exception {
+ void testGetContainersRequestPBImpl() throws Exception {
validatePBImplRecord(GetContainersRequestPBImpl.class,
GetContainersRequestProto.class);
- public void testGetContainersResponsePBImpl() throws Exception {
+ void testGetContainersResponsePBImpl() throws Exception {
validatePBImplRecord(GetContainersResponsePBImpl.class,
GetContainersResponseProto.class);
- public void testGetContainerStatusesRequestPBImpl() throws Exception {
+ void testGetContainerStatusesRequestPBImpl() throws Exception {
validatePBImplRecord(GetContainerStatusesRequestPBImpl.class,
GetContainerStatusesRequestProto.class);
- public void testGetContainerStatusesResponsePBImpl() throws Exception {
+ void testGetContainerStatusesResponsePBImpl() throws Exception {
validatePBImplRecord(GetContainerStatusesResponsePBImpl.class,
GetContainerStatusesResponseProto.class);
- public void testGetDelegationTokenRequestPBImpl() throws Exception {
+ void testGetDelegationTokenRequestPBImpl() throws Exception {
validatePBImplRecord(GetDelegationTokenRequestPBImpl.class,
GetDelegationTokenRequestProto.class);
- public void testGetDelegationTokenResponsePBImpl() throws Exception {
+ void testGetDelegationTokenResponsePBImpl() throws Exception {
validatePBImplRecord(GetDelegationTokenResponsePBImpl.class,
GetDelegationTokenResponseProto.class);
- public void testGetNewApplicationRequestPBImpl() throws Exception {
+ void testGetNewApplicationRequestPBImpl() throws Exception {
validatePBImplRecord(GetNewApplicationRequestPBImpl.class,
GetNewApplicationRequestProto.class);
- public void testGetNewApplicationResponsePBImpl() throws Exception {
+ void testGetNewApplicationResponsePBImpl() throws Exception {
validatePBImplRecord(GetNewApplicationResponsePBImpl.class,
GetNewApplicationResponseProto.class);
- public void testGetQueueInfoRequestPBImpl() throws Exception {
+ void testGetQueueInfoRequestPBImpl() throws Exception {
validatePBImplRecord(GetQueueInfoRequestPBImpl.class,
GetQueueInfoRequestProto.class);
- public void testGetQueueInfoResponsePBImpl() throws Exception {
+ void testGetQueueInfoResponsePBImpl() throws Exception {
validatePBImplRecord(GetQueueInfoResponsePBImpl.class,
GetQueueInfoResponseProto.class);
- public void testGetQueueUserAclsInfoRequestPBImpl() throws Exception {
+ void testGetQueueUserAclsInfoRequestPBImpl() throws Exception {
validatePBImplRecord(GetQueueUserAclsInfoRequestPBImpl.class,
GetQueueUserAclsInfoRequestProto.class);
- public void testGetQueueUserAclsInfoResponsePBImpl() throws Exception {
+ void testGetQueueUserAclsInfoResponsePBImpl() throws Exception {
validatePBImplRecord(GetQueueUserAclsInfoResponsePBImpl.class,
GetQueueUserAclsInfoResponseProto.class);
- public void testKillApplicationRequestPBImpl() throws Exception {
+ void testKillApplicationRequestPBImpl() throws Exception {
validatePBImplRecord(KillApplicationRequestPBImpl.class,
KillApplicationRequestProto.class);
- public void testKillApplicationResponsePBImpl() throws Exception {
+ void testKillApplicationResponsePBImpl() throws Exception {
validatePBImplRecord(KillApplicationResponsePBImpl.class,
KillApplicationResponseProto.class);
- public void testMoveApplicationAcrossQueuesRequestPBImpl() throws Exception {
+ void testMoveApplicationAcrossQueuesRequestPBImpl() throws Exception {
validatePBImplRecord(MoveApplicationAcrossQueuesRequestPBImpl.class,
MoveApplicationAcrossQueuesRequestProto.class);
- public void testMoveApplicationAcrossQueuesResponsePBImpl() throws Exception {
+ void testMoveApplicationAcrossQueuesResponsePBImpl() throws Exception {
validatePBImplRecord(MoveApplicationAcrossQueuesResponsePBImpl.class,
MoveApplicationAcrossQueuesResponseProto.class);
- public void testRegisterApplicationMasterRequestPBImpl() throws Exception {
+ void testRegisterApplicationMasterRequestPBImpl() throws Exception {
validatePBImplRecord(RegisterApplicationMasterRequestPBImpl.class,
RegisterApplicationMasterRequestProto.class);
- public void testRegisterApplicationMasterResponsePBImpl() throws Exception {
+ void testRegisterApplicationMasterResponsePBImpl() throws Exception {
validatePBImplRecord(RegisterApplicationMasterResponsePBImpl.class,
RegisterApplicationMasterResponseProto.class);
- public void testRenewDelegationTokenRequestPBImpl() throws Exception {
+ void testRenewDelegationTokenRequestPBImpl() throws Exception {
validatePBImplRecord(RenewDelegationTokenRequestPBImpl.class,
RenewDelegationTokenRequestProto.class);
- public void testRenewDelegationTokenResponsePBImpl() throws Exception {
+ void testRenewDelegationTokenResponsePBImpl() throws Exception {
validatePBImplRecord(RenewDelegationTokenResponsePBImpl.class,
RenewDelegationTokenResponseProto.class);
- public void testStartContainerRequestPBImpl() throws Exception {
+ void testStartContainerRequestPBImpl() throws Exception {
validatePBImplRecord(StartContainerRequestPBImpl.class,
StartContainerRequestProto.class);
- public void testStartContainersRequestPBImpl() throws Exception {
+ void testStartContainersRequestPBImpl() throws Exception {
validatePBImplRecord(StartContainersRequestPBImpl.class,
StartContainersRequestProto.class);
- public void testStartContainersResponsePBImpl() throws Exception {
+ void testStartContainersResponsePBImpl() throws Exception {
validatePBImplRecord(StartContainersResponsePBImpl.class,
StartContainersResponseProto.class);
- public void testStopContainersRequestPBImpl() throws Exception {
+ void testStopContainersRequestPBImpl() throws Exception {
validatePBImplRecord(StopContainersRequestPBImpl.class,
StopContainersRequestProto.class);
- public void testStopContainersResponsePBImpl() throws Exception {
+ void testStopContainersResponsePBImpl() throws Exception {
validatePBImplRecord(StopContainersResponsePBImpl.class,
StopContainersResponseProto.class);
- public void testIncreaseContainersResourceRequestPBImpl() throws Exception {
+ void testIncreaseContainersResourceRequestPBImpl() throws Exception {
validatePBImplRecord(IncreaseContainersResourceRequestPBImpl.class,
IncreaseContainersResourceRequestProto.class);
- public void testIncreaseContainersResourceResponsePBImpl() throws Exception {
+ void testIncreaseContainersResourceResponsePBImpl() throws Exception {
validatePBImplRecord(IncreaseContainersResourceResponsePBImpl.class,
IncreaseContainersResourceResponseProto.class);
- public void testSubmitApplicationRequestPBImpl() throws Exception {
+ void testSubmitApplicationRequestPBImpl() throws Exception {
validatePBImplRecord(SubmitApplicationRequestPBImpl.class,
SubmitApplicationRequestProto.class);
- public void testSubmitApplicationResponsePBImpl() throws Exception {
+ void testSubmitApplicationResponsePBImpl() throws Exception {
validatePBImplRecord(SubmitApplicationResponsePBImpl.class,
SubmitApplicationResponseProto.class);
- @Test
- @Ignore
// ignore cause ApplicationIdPBImpl is immutable
- public void testApplicationAttemptIdPBImpl() throws Exception {
+ @Test
+ @Disabled
+ void testApplicationAttemptIdPBImpl() throws Exception {
validatePBImplRecord(ApplicationAttemptIdPBImpl.class,
ApplicationAttemptIdProto.class);
- public void testApplicationAttemptReportPBImpl() throws Exception {
+ void testApplicationAttemptReportPBImpl() throws Exception {
validatePBImplRecord(ApplicationAttemptReportPBImpl.class,
ApplicationAttemptReportProto.class);
- public void testApplicationIdPBImpl() throws Exception {
+ void testApplicationIdPBImpl() throws Exception {
validatePBImplRecord(ApplicationIdPBImpl.class, ApplicationIdProto.class);
- public void testApplicationReportPBImpl() throws Exception {
+ void testApplicationReportPBImpl() throws Exception {
validatePBImplRecord(ApplicationReportPBImpl.class,
ApplicationReportProto.class);
- public void testApplicationResourceUsageReportPBImpl() throws Exception {
+ void testApplicationResourceUsageReportPBImpl() throws Exception {
excludedPropertiesMap.put(ApplicationResourceUsageReportPBImpl.class.getClass(),
Arrays.asList("PreemptedResourceSecondsMap", "ResourceSecondsMap"));
validatePBImplRecord(ApplicationResourceUsageReportPBImpl.class,
@@ -801,550 +803,550 @@ public class TestPBImplRecords extends BasePBImplRecordsTest {
- public void testApplicationSubmissionContextPBImpl() throws Exception {
+ void testApplicationSubmissionContextPBImpl() throws Exception {
validatePBImplRecord(ApplicationSubmissionContextPBImpl.class,
ApplicationSubmissionContextProto.class);
ApplicationSubmissionContext ctx =
ApplicationSubmissionContext.newInstance(null, null, null, null, null,
false, false, 0, Resources.none(), null, false, null, null);
- Assert.assertNotNull(ctx.getResource());
+ assertNotNull(ctx.getResource());
- public void testContainerIdPBImpl() throws Exception {
+ void testContainerIdPBImpl() throws Exception {
validatePBImplRecord(ContainerIdPBImpl.class, ContainerIdProto.class);
- public void testContainerRetryPBImpl() throws Exception {
+ void testContainerRetryPBImpl() throws Exception {
validatePBImplRecord(ContainerRetryContextPBImpl.class,
ContainerRetryContextProto.class);
- public void testContainerLaunchContextPBImpl() throws Exception {
+ void testContainerLaunchContextPBImpl() throws Exception {
validatePBImplRecord(ContainerLaunchContextPBImpl.class,
ContainerLaunchContextProto.class);
- public void testResourceLocalizationRequest() throws Exception {
+ void testResourceLocalizationRequest() throws Exception {
validatePBImplRecord(ResourceLocalizationRequestPBImpl.class,
YarnServiceProtos.ResourceLocalizationRequestProto.class);
- public void testResourceLocalizationResponse() throws Exception {
+ void testResourceLocalizationResponse() throws Exception {
validatePBImplRecord(ResourceLocalizationResponsePBImpl.class,
YarnServiceProtos.ResourceLocalizationResponseProto.class);
- public void testContainerPBImpl() throws Exception {
+ void testContainerPBImpl() throws Exception {
validatePBImplRecord(ContainerPBImpl.class, ContainerProto.class);
- public void testContainerReportPBImpl() throws Exception {
+ void testContainerReportPBImpl() throws Exception {
validatePBImplRecord(ContainerReportPBImpl.class, ContainerReportProto.class);
- public void testUpdateContainerRequestPBImpl() throws Exception {
+ void testUpdateContainerRequestPBImpl() throws Exception {
validatePBImplRecord(UpdateContainerRequestPBImpl.class,
YarnServiceProtos.UpdateContainerRequestProto.class);
- public void testContainerStatusPBImpl() throws Exception {
+ void testContainerStatusPBImpl() throws Exception {
validatePBImplRecord(ContainerStatusPBImpl.class, ContainerStatusProto.class);
- public void testLocalResourcePBImpl() throws Exception {
+ void testLocalResourcePBImpl() throws Exception {
validatePBImplRecord(LocalResourcePBImpl.class, LocalResourceProto.class);
- public void testNMTokenPBImpl() throws Exception {
+ void testNMTokenPBImpl() throws Exception {
validatePBImplRecord(NMTokenPBImpl.class, NMTokenProto.class);
- public void testNodeIdPBImpl() throws Exception {
+ void testNodeIdPBImpl() throws Exception {
validatePBImplRecord(NodeIdPBImpl.class, NodeIdProto.class);
- public void testNodeReportPBImpl() throws Exception {
+ void testNodeReportPBImpl() throws Exception {
validatePBImplRecord(NodeReportPBImpl.class, NodeReportProto.class);
- public void testPreemptionContainerPBImpl() throws Exception {
+ void testPreemptionContainerPBImpl() throws Exception {
validatePBImplRecord(PreemptionContainerPBImpl.class,
PreemptionContainerProto.class);
- public void testPreemptionContractPBImpl() throws Exception {
+ void testPreemptionContractPBImpl() throws Exception {
validatePBImplRecord(PreemptionContractPBImpl.class,
PreemptionContractProto.class);
- public void testPreemptionMessagePBImpl() throws Exception {
+ void testPreemptionMessagePBImpl() throws Exception {
validatePBImplRecord(PreemptionMessagePBImpl.class,
PreemptionMessageProto.class);
- public void testPreemptionResourceRequestPBImpl() throws Exception {
+ void testPreemptionResourceRequestPBImpl() throws Exception {
validatePBImplRecord(PreemptionResourceRequestPBImpl.class,
PreemptionResourceRequestProto.class);
- public void testPriorityPBImpl() throws Exception {
+ void testPriorityPBImpl() throws Exception {
validatePBImplRecord(PriorityPBImpl.class, PriorityProto.class);
- public void testQueueInfoPBImpl() throws Exception {
+ void testQueueInfoPBImpl() throws Exception {
validatePBImplRecord(QueueInfoPBImpl.class, QueueInfoProto.class);
- public void testQueueConfigurationsPBImpl() throws Exception{
+ void testQueueConfigurationsPBImpl() throws Exception {
validatePBImplRecord(QueueConfigurationsPBImpl.class,
QueueConfigurationsProto.class);
- public void testQueueUserACLInfoPBImpl() throws Exception {
+ void testQueueUserACLInfoPBImpl() throws Exception {
validatePBImplRecord(QueueUserACLInfoPBImpl.class,
QueueUserACLInfoProto.class);
- public void testResourceBlacklistRequestPBImpl() throws Exception {
+ void testResourceBlacklistRequestPBImpl() throws Exception {
validatePBImplRecord(ResourceBlacklistRequestPBImpl.class,
ResourceBlacklistRequestProto.class);
// ignore as ResourceOptionPBImpl is immutable
- public void testResourceOptionPBImpl() throws Exception {
+ void testResourceOptionPBImpl() throws Exception {
validatePBImplRecord(ResourceOptionPBImpl.class, ResourceOptionProto.class);
- public void testResourcePBImpl() throws Exception {
+ void testResourcePBImpl() throws Exception {
validatePBImplRecord(ResourcePBImpl.class, ResourceProto.class);
- public void testResourceRequestPBImpl() throws Exception {
+ void testResourceRequestPBImpl() throws Exception {
validatePBImplRecord(ResourceRequestPBImpl.class, ResourceRequestProto.class);
- public void testResourceSizingPBImpl() throws Exception {
+ void testResourceSizingPBImpl() throws Exception {
validatePBImplRecord(ResourceSizingPBImpl.class, ResourceSizingProto.class);
- public void testSchedulingRequestPBImpl() throws Exception {
+ void testSchedulingRequestPBImpl() throws Exception {
validatePBImplRecord(SchedulingRequestPBImpl.class,
SchedulingRequestProto.class);
- public void testSerializedExceptionPBImpl() throws Exception {
+ void testSerializedExceptionPBImpl() throws Exception {
validatePBImplRecord(SerializedExceptionPBImpl.class,
SerializedExceptionProto.class);
- public void testStrictPreemptionContractPBImpl() throws Exception {
+ void testStrictPreemptionContractPBImpl() throws Exception {
validatePBImplRecord(StrictPreemptionContractPBImpl.class,
StrictPreemptionContractProto.class);
- public void testTokenPBImpl() throws Exception {
+ void testTokenPBImpl() throws Exception {
validatePBImplRecord(TokenPBImpl.class, TokenProto.class);
- public void testURLPBImpl() throws Exception {
+ void testURLPBImpl() throws Exception {
validatePBImplRecord(URLPBImpl.class, URLProto.class);
- public void testYarnClusterMetricsPBImpl() throws Exception {
+ void testYarnClusterMetricsPBImpl() throws Exception {
validatePBImplRecord(YarnClusterMetricsPBImpl.class,
YarnClusterMetricsProto.class);
- public void testRefreshAdminAclsRequestPBImpl() throws Exception {
+ void testRefreshAdminAclsRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshAdminAclsRequestPBImpl.class,
RefreshAdminAclsRequestProto.class);
- public void testRefreshAdminAclsResponsePBImpl() throws Exception {
+ void testRefreshAdminAclsResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshAdminAclsResponsePBImpl.class,
RefreshAdminAclsResponseProto.class);
- public void testRefreshNodesRequestPBImpl() throws Exception {
+ void testRefreshNodesRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshNodesRequestPBImpl.class,
RefreshNodesRequestProto.class);
- public void testRefreshNodesResponsePBImpl() throws Exception {
+ void testRefreshNodesResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshNodesResponsePBImpl.class,
RefreshNodesResponseProto.class);
- public void testRefreshQueuesRequestPBImpl() throws Exception {
+ void testRefreshQueuesRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshQueuesRequestPBImpl.class,
RefreshQueuesRequestProto.class);
- public void testRefreshQueuesResponsePBImpl() throws Exception {
+ void testRefreshQueuesResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshQueuesResponsePBImpl.class,
RefreshQueuesResponseProto.class);
- public void testRefreshNodesResourcesRequestPBImpl() throws Exception {
+ void testRefreshNodesResourcesRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshNodesResourcesRequestPBImpl.class,
RefreshNodesResourcesRequestProto.class);
- public void testRefreshNodesResourcesResponsePBImpl() throws Exception {
+ void testRefreshNodesResourcesResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshNodesResourcesResponsePBImpl.class,
RefreshNodesResourcesResponseProto.class);
- public void testRefreshServiceAclsRequestPBImpl() throws Exception {
+ void testRefreshServiceAclsRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshServiceAclsRequestPBImpl.class,
RefreshServiceAclsRequestProto.class);
- public void testRefreshServiceAclsResponsePBImpl() throws Exception {
+ void testRefreshServiceAclsResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshServiceAclsResponsePBImpl.class,
RefreshServiceAclsResponseProto.class);
- public void testRefreshSuperUserGroupsConfigurationRequestPBImpl()
+ void testRefreshSuperUserGroupsConfigurationRequestPBImpl()
throws Exception {
validatePBImplRecord(RefreshSuperUserGroupsConfigurationRequestPBImpl.class,
RefreshSuperUserGroupsConfigurationRequestProto.class);
- public void testRefreshSuperUserGroupsConfigurationResponsePBImpl()
+ void testRefreshSuperUserGroupsConfigurationResponsePBImpl()
validatePBImplRecord(RefreshSuperUserGroupsConfigurationResponsePBImpl.class,
RefreshSuperUserGroupsConfigurationResponseProto.class);
- public void testRefreshUserToGroupsMappingsRequestPBImpl() throws Exception {
+ void testRefreshUserToGroupsMappingsRequestPBImpl() throws Exception {
validatePBImplRecord(RefreshUserToGroupsMappingsRequestPBImpl.class,
RefreshUserToGroupsMappingsRequestProto.class);
- public void testRefreshUserToGroupsMappingsResponsePBImpl() throws Exception {
+ void testRefreshUserToGroupsMappingsResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshUserToGroupsMappingsResponsePBImpl.class,
RefreshUserToGroupsMappingsResponseProto.class);
- public void testUpdateNodeResourceRequestPBImpl() throws Exception {
+ void testUpdateNodeResourceRequestPBImpl() throws Exception {
validatePBImplRecord(UpdateNodeResourceRequestPBImpl.class,
UpdateNodeResourceRequestProto.class);
- public void testUpdateNodeResourceResponsePBImpl() throws Exception {
+ void testUpdateNodeResourceResponsePBImpl() throws Exception {
validatePBImplRecord(UpdateNodeResourceResponsePBImpl.class,
UpdateNodeResourceResponseProto.class);
- public void testReservationSubmissionRequestPBImpl() throws Exception {
+ void testReservationSubmissionRequestPBImpl() throws Exception {
validatePBImplRecord(ReservationSubmissionRequestPBImpl.class,
ReservationSubmissionRequestProto.class);
- public void testReservationSubmissionResponsePBImpl() throws Exception {
+ void testReservationSubmissionResponsePBImpl() throws Exception {
validatePBImplRecord(ReservationSubmissionResponsePBImpl.class,
ReservationSubmissionResponseProto.class);
- public void testReservationUpdateRequestPBImpl() throws Exception {
+ void testReservationUpdateRequestPBImpl() throws Exception {
validatePBImplRecord(ReservationUpdateRequestPBImpl.class,
ReservationUpdateRequestProto.class);
- public void testReservationUpdateResponsePBImpl() throws Exception {
+ void testReservationUpdateResponsePBImpl() throws Exception {
validatePBImplRecord(ReservationUpdateResponsePBImpl.class,
ReservationUpdateResponseProto.class);
- public void testReservationDeleteRequestPBImpl() throws Exception {
+ void testReservationDeleteRequestPBImpl() throws Exception {
validatePBImplRecord(ReservationDeleteRequestPBImpl.class,
ReservationDeleteRequestProto.class);
- public void testReservationDeleteResponsePBImpl() throws Exception {
+ void testReservationDeleteResponsePBImpl() throws Exception {
validatePBImplRecord(ReservationDeleteResponsePBImpl.class,
ReservationDeleteResponseProto.class);
- public void testReservationListRequestPBImpl() throws Exception {
+ void testReservationListRequestPBImpl() throws Exception {
validatePBImplRecord(ReservationListRequestPBImpl.class,
- ReservationListRequestProto.class);
+ ReservationListRequestProto.class);
- public void testReservationListResponsePBImpl() throws Exception {
+ void testReservationListResponsePBImpl() throws Exception {
validatePBImplRecord(ReservationListResponsePBImpl.class,
- ReservationListResponseProto.class);
+ ReservationListResponseProto.class);
- public void testAddToClusterNodeLabelsRequestPBImpl() throws Exception {
+ void testAddToClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(AddToClusterNodeLabelsRequestPBImpl.class,
AddToClusterNodeLabelsRequestProto.class);
- public void testAddToClusterNodeLabelsResponsePBImpl() throws Exception {
+ void testAddToClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(AddToClusterNodeLabelsResponsePBImpl.class,
AddToClusterNodeLabelsResponseProto.class);
- public void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception {
+ void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(RemoveFromClusterNodeLabelsRequestPBImpl.class,
RemoveFromClusterNodeLabelsRequestProto.class);
- public void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception {
+ void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(RemoveFromClusterNodeLabelsResponsePBImpl.class,
RemoveFromClusterNodeLabelsResponseProto.class);
- public void testGetClusterNodeLabelsRequestPBImpl() throws Exception {
+ void testGetClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeLabelsRequestPBImpl.class,
GetClusterNodeLabelsRequestProto.class);
- public void testGetClusterNodeLabelsResponsePBImpl() throws Exception {
+ void testGetClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeLabelsResponsePBImpl.class,
GetClusterNodeLabelsResponseProto.class);
- public void testReplaceLabelsOnNodeRequestPBImpl() throws Exception {
+ void testReplaceLabelsOnNodeRequestPBImpl() throws Exception {
validatePBImplRecord(ReplaceLabelsOnNodeRequestPBImpl.class,
ReplaceLabelsOnNodeRequestProto.class);
- public void testReplaceLabelsOnNodeResponsePBImpl() throws Exception {
+ void testReplaceLabelsOnNodeResponsePBImpl() throws Exception {
validatePBImplRecord(ReplaceLabelsOnNodeResponsePBImpl.class,
ReplaceLabelsOnNodeResponseProto.class);
- public void testGetNodeToLabelsRequestPBImpl() throws Exception {
+ void testGetNodeToLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(GetNodesToLabelsRequestPBImpl.class,
GetNodesToLabelsRequestProto.class);
- public void testGetNodeToLabelsResponsePBImpl() throws Exception {
+ void testGetNodeToLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(GetNodesToLabelsResponsePBImpl.class,
GetNodesToLabelsResponseProto.class);
- public void testGetLabelsToNodesRequestPBImpl() throws Exception {
+ void testGetLabelsToNodesRequestPBImpl() throws Exception {
validatePBImplRecord(GetLabelsToNodesRequestPBImpl.class,
GetLabelsToNodesRequestProto.class);
- public void testGetLabelsToNodesResponsePBImpl() throws Exception {
+ void testGetLabelsToNodesResponsePBImpl() throws Exception {
validatePBImplRecord(GetLabelsToNodesResponsePBImpl.class,
GetLabelsToNodesResponseProto.class);
- public void testNodeLabelAttributesPBImpl() throws Exception {
+ void testNodeLabelAttributesPBImpl() throws Exception {
validatePBImplRecord(NodeLabelPBImpl.class,
NodeLabelProto.class);
- public void testCheckForDecommissioningNodesRequestPBImpl() throws Exception {
+ void testCheckForDecommissioningNodesRequestPBImpl() throws Exception {
validatePBImplRecord(CheckForDecommissioningNodesRequestPBImpl.class,
CheckForDecommissioningNodesRequestProto.class);
- public void testCheckForDecommissioningNodesResponsePBImpl() throws Exception {
+ void testCheckForDecommissioningNodesResponsePBImpl() throws Exception {
validatePBImplRecord(CheckForDecommissioningNodesResponsePBImpl.class,
CheckForDecommissioningNodesResponseProto.class);
- public void testExecutionTypeRequestPBImpl() throws Exception {
+ void testExecutionTypeRequestPBImpl() throws Exception {
validatePBImplRecord(ExecutionTypeRequestPBImpl.class,
ExecutionTypeRequestProto.class);
- public void testGetAllResourceProfilesResponsePBImpl() throws Exception {
+ void testGetAllResourceProfilesResponsePBImpl() throws Exception {
validatePBImplRecord(GetAllResourceProfilesResponsePBImpl.class,
GetAllResourceProfilesResponseProto.class);
- public void testGetResourceProfileRequestPBImpl() throws Exception {
+ void testGetResourceProfileRequestPBImpl() throws Exception {
validatePBImplRecord(GetResourceProfileRequestPBImpl.class,
GetResourceProfileRequestProto.class);
- public void testGetResourceProfileResponsePBImpl() throws Exception {
+ void testGetResourceProfileResponsePBImpl() throws Exception {
validatePBImplRecord(GetResourceProfileResponsePBImpl.class,
GetResourceProfileResponseProto.class);
- public void testResourceTypesInfoPBImpl() throws Exception {
+ void testResourceTypesInfoPBImpl() throws Exception {
validatePBImplRecord(ResourceTypeInfoPBImpl.class,
YarnProtos.ResourceTypeInfoProto.class);
- public void testGetAllResourceTypesInfoRequestPBImpl() throws Exception {
+ void testGetAllResourceTypesInfoRequestPBImpl() throws Exception {
validatePBImplRecord(GetAllResourceTypeInfoRequestPBImpl.class,
YarnServiceProtos.GetAllResourceTypeInfoRequestProto.class);
- public void testGetAllResourceTypesInfoResponsePBImpl() throws Exception {
+ void testGetAllResourceTypesInfoResponsePBImpl() throws Exception {
validatePBImplRecord(GetAllResourceTypeInfoResponsePBImpl.class,
YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class);
- public void testNodeAttributeKeyPBImpl() throws Exception {
+ void testNodeAttributeKeyPBImpl() throws Exception {
validatePBImplRecord(NodeAttributeKeyPBImpl.class,
NodeAttributeKeyProto.class);
- public void testNodeToAttributeValuePBImpl() throws Exception {
+ void testNodeToAttributeValuePBImpl() throws Exception {
validatePBImplRecord(NodeToAttributeValuePBImpl.class,
NodeToAttributeValueProto.class);
- public void testNodeAttributePBImpl() throws Exception {
+ void testNodeAttributePBImpl() throws Exception {
validatePBImplRecord(NodeAttributePBImpl.class, NodeAttributeProto.class);
- public void testNodeAttributeInfoPBImpl() throws Exception {
+ void testNodeAttributeInfoPBImpl() throws Exception {
validatePBImplRecord(NodeAttributeInfoPBImpl.class,
NodeAttributeInfoProto.class);
- public void testNodeToAttributesPBImpl() throws Exception {
+ void testNodeToAttributesPBImpl() throws Exception {
validatePBImplRecord(NodeToAttributesPBImpl.class,
NodeToAttributesProto.class);
- public void testNodesToAttributesMappingRequestPBImpl() throws Exception {
+ void testNodesToAttributesMappingRequestPBImpl() throws Exception {
validatePBImplRecord(NodesToAttributesMappingRequestPBImpl.class,
NodesToAttributesMappingRequestProto.class);
- public void testGetAttributesToNodesRequestPBImpl() throws Exception {
+ void testGetAttributesToNodesRequestPBImpl() throws Exception {
validatePBImplRecord(GetAttributesToNodesRequestPBImpl.class,
YarnServiceProtos.GetAttributesToNodesRequestProto.class);
- public void testGetAttributesToNodesResponsePBImpl() throws Exception {
+ void testGetAttributesToNodesResponsePBImpl() throws Exception {
validatePBImplRecord(GetAttributesToNodesResponsePBImpl.class,
YarnServiceProtos.GetAttributesToNodesResponseProto.class);
- public void testGetClusterNodeAttributesRequestPBImpl() throws Exception {
+ void testGetClusterNodeAttributesRequestPBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeAttributesRequestPBImpl.class,
YarnServiceProtos.GetClusterNodeAttributesRequestProto.class);
- public void testGetClusterNodeAttributesResponsePBImpl() throws Exception {
+ void testGetClusterNodeAttributesResponsePBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeAttributesResponsePBImpl.class,
YarnServiceProtos.GetClusterNodeAttributesResponseProto.class);
- public void testGetNodesToAttributesRequestPBImpl() throws Exception {
+ void testGetNodesToAttributesRequestPBImpl() throws Exception {
validatePBImplRecord(GetNodesToAttributesRequestPBImpl.class,
YarnServiceProtos.GetNodesToAttributesRequestProto.class);
- public void testGetNodesToAttributesResponsePBImpl() throws Exception {
+ void testGetNodesToAttributesResponsePBImpl() throws Exception {
validatePBImplRecord(GetNodesToAttributesResponsePBImpl.class,
YarnServiceProtos.GetNodesToAttributesResponseProto.class);
- public void testGetEnhancedHeadroomPBImpl() throws Exception {
+ void testGetEnhancedHeadroomPBImpl() throws Exception {
validatePBImplRecord(EnhancedHeadroomPBImpl.class,
YarnServiceProtos.EnhancedHeadroomProto.class);
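
The whole hunk above applies one mechanical pattern: JUnit 5 (Jupiter) no longer requires test classes or @Test methods to be public, so the migration drops the `public` modifier and leaves each method body untouched. A minimal sketch of the before/after shape, with a hypothetical record name standing in for the PBImpl/Proto pairs:

    import org.junit.jupiter.api.Test;

    public class VisibilityMigrationSketch {
      // JUnit 4 form:  @Test public void testFooPBImpl() throws Exception { ... }
      // JUnit 5 form below; package-private visibility is enough for Jupiter.
      @Test
      void testFooPBImpl() throws Exception {
        // validatePBImplRecord(FooPBImpl.class, FooProto.class);  // hypothetical helper
      }
    }
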
@@ -18,17 +18,10 @@
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.NODE;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.RACK;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.cardinality;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.maxCardinality;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.or;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetCardinality;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetIn;
-import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.PlacementTargets.allocationTag;
import org.apache.hadoop.yarn.api.pb.PlacementConstraintFromProtoConverter;
import org.apache.hadoop.yarn.api.pb.PlacementConstraintToProtoConverter;
@@ -40,8 +33,18 @@ import org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto
import org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType;
import org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto;
import org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.NODE;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.RACK;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.cardinality;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.maxCardinality;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.or;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetCardinality;
+import static org.apache.hadoop.yarn.api.resource.PlacementConstraints.targetIn;
* Test class for {@link PlacementConstraintToProtoConverter} and
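
The import shuffles in this hunk follow the ordering applied across the patch: regular imports first, static imports grouped at the end. Whether that ordering is enforced by a checkstyle rule is not visible here; the sketch below only illustrates the resulting layout:

    import java.util.Arrays;
    import java.util.Iterator;

    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class ImportOrderSketch {
      @org.junit.jupiter.api.Test
      void ordered() {
        Iterator<String> it = Arrays.asList("a").iterator();
        assertTrue(it.hasNext());  // static import used without the Assertions prefix
      }
    }
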
@@ -50,10 +53,10 @@ import org.junit.Test;
public class TestPlacementConstraintPBConversion {
- public void testTargetConstraintProtoConverter() {
+ void testTargetConstraintProtoConverter() {
AbstractConstraint sConstraintExpr =
targetIn(NODE, allocationTag("hbase-m"));
- Assert.assertTrue(sConstraintExpr instanceof SingleConstraint);
+ assertTrue(sConstraintExpr instanceof SingleConstraint);
SingleConstraint single = (SingleConstraint) sConstraintExpr;
PlacementConstraint sConstraint =
PlacementConstraints.build(sConstraintExpr);
@@ -63,14 +66,14 @@ public class TestPlacementConstraintPBConversion {
new PlacementConstraintToProtoConverter(sConstraint);
PlacementConstraintProto protoConstraint = toProtoConverter.convert();
- Assert.assertTrue(protoConstraint.hasSimpleConstraint());
- Assert.assertFalse(protoConstraint.hasCompositeConstraint());
+ assertTrue(protoConstraint.hasSimpleConstraint());
+ assertFalse(protoConstraint.hasCompositeConstraint());
SimplePlacementConstraintProto sProto =
protoConstraint.getSimpleConstraint();
- Assert.assertEquals(single.getScope(), sProto.getScope());
- Assert.assertEquals(single.getMinCardinality(), sProto.getMinCardinality());
- Assert.assertEquals(single.getMaxCardinality(), sProto.getMaxCardinality());
- Assert.assertEquals(single.getTargetExpressions().size(),
+ assertEquals(single.getScope(), sProto.getScope());
+ assertEquals(single.getMinCardinality(), sProto.getMinCardinality());
+ assertEquals(single.getMaxCardinality(), sProto.getMaxCardinality());
+ assertEquals(single.getTargetExpressions().size(),
sProto.getTargetExpressionsList().size());
// Convert from proto.
@@ -79,21 +82,21 @@ public class TestPlacementConstraintPBConversion {
PlacementConstraint newConstraint = fromProtoConverter.convert();
AbstractConstraint newConstraintExpr = newConstraint.getConstraintExpr();
- Assert.assertTrue(newConstraintExpr instanceof SingleConstraint);
+ assertTrue(newConstraintExpr instanceof SingleConstraint);
SingleConstraint newSingle = (SingleConstraint) newConstraintExpr;
- Assert.assertEquals(single.getScope(), newSingle.getScope());
- Assert.assertEquals(single.getMinCardinality(),
+ assertEquals(single.getScope(), newSingle.getScope());
+ assertEquals(single.getMinCardinality(),
newSingle.getMinCardinality());
- Assert.assertEquals(single.getMaxCardinality(),
+ assertEquals(single.getMaxCardinality(),
newSingle.getMaxCardinality());
- Assert.assertEquals(single.getTargetExpressions(),
+ assertEquals(single.getTargetExpressions(),
newSingle.getTargetExpressions());
- public void testCardinalityConstraintProtoConverter() {
+ void testCardinalityConstraintProtoConverter() {
AbstractConstraint sConstraintExpr = cardinality(RACK, 3, 10);
@@ -111,17 +114,17 @@ public class TestPlacementConstraintPBConversion {
compareSimpleConstraints(single, newSingle);
- public void testCompositeConstraintProtoConverter() {
+ void testCompositeConstraintProtoConverter() {
AbstractConstraint constraintExpr =
or(targetIn(RACK, allocationTag("spark")), maxCardinality(NODE, 3),
targetCardinality(RACK, 2, 10, allocationTag("zk")));
- Assert.assertTrue(constraintExpr instanceof Or);
+ assertTrue(constraintExpr instanceof Or);
PlacementConstraint constraint = PlacementConstraints.build(constraintExpr);
Or orExpr = (Or) constraintExpr;
@@ -130,14 +133,14 @@ public class TestPlacementConstraintPBConversion {
new PlacementConstraintToProtoConverter(constraint);
- Assert.assertFalse(protoConstraint.hasSimpleConstraint());
- Assert.assertTrue(protoConstraint.hasCompositeConstraint());
+ assertFalse(protoConstraint.hasSimpleConstraint());
+ assertTrue(protoConstraint.hasCompositeConstraint());
CompositePlacementConstraintProto cProto =
protoConstraint.getCompositeConstraint();
- Assert.assertEquals(CompositeType.OR, cProto.getCompositeType());
- Assert.assertEquals(3, cProto.getChildConstraintsCount());
- Assert.assertEquals(0, cProto.getTimedChildConstraintsCount());
+ assertEquals(CompositeType.OR, cProto.getCompositeType());
+ assertEquals(3, cProto.getChildConstraintsCount());
+ assertEquals(0, cProto.getTimedChildConstraintsCount());
Iterator<AbstractConstraint> orChildren = orExpr.getChildren().iterator();
Iterator<PlacementConstraintProto> orProtoChildren =
cProto.getChildConstraintsList().iterator();
@@ -153,9 +156,9 @@ public class TestPlacementConstraintPBConversion {
- Assert.assertTrue(newConstraintExpr instanceof Or);
+ assertTrue(newConstraintExpr instanceof Or);
Or newOrExpr = (Or) newConstraintExpr;
- Assert.assertEquals(3, newOrExpr.getChildren().size());
+ assertEquals(3, newOrExpr.getChildren().size());
orChildren = orExpr.getChildren().iterator();
Iterator<AbstractConstraint> newOrChildren =
newOrExpr.getChildren().iterator();
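
The instanceof checks above migrate one-for-one from Assert.assertTrue to the statically imported assertTrue. If the build is on JUnit 5.8 or later, assertInstanceOf is an alternative (not used by this patch) that asserts the type and returns the cast value in one step; a hedged sketch with a stand-in value:

    import static org.junit.jupiter.api.Assertions.assertInstanceOf;

    public class InstanceOfSketch {
      @org.junit.jupiter.api.Test
      void typeCheckAndCastTogether() {
        Object expr = "stand-in for a constraint expression";  // hypothetical value
        // Fails with a descriptive message if expr is not a String;
        // otherwise returns it already cast, removing the manual (String) cast.
        String single = assertInstanceOf(String.class, expr);
        System.out.println(single.length());
      }
    }
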
@@ -169,26 +172,26 @@ public class TestPlacementConstraintPBConversion {
private void compareSimpleConstraintToProto(SingleConstraint constraint,
PlacementConstraintProto proto) {
- Assert.assertTrue(proto.hasSimpleConstraint());
- Assert.assertFalse(proto.hasCompositeConstraint());
+ assertTrue(proto.hasSimpleConstraint());
+ assertFalse(proto.hasCompositeConstraint());
SimplePlacementConstraintProto sProto = proto.getSimpleConstraint();
- Assert.assertEquals(constraint.getScope(), sProto.getScope());
- Assert.assertEquals(constraint.getMinCardinality(),
+ assertEquals(constraint.getScope(), sProto.getScope());
+ assertEquals(constraint.getMinCardinality(),
sProto.getMinCardinality());
- Assert.assertEquals(constraint.getMaxCardinality(),
+ assertEquals(constraint.getMaxCardinality(),
sProto.getMaxCardinality());
- Assert.assertEquals(constraint.getTargetExpressions().size(),
+ assertEquals(constraint.getTargetExpressions().size(),
private void compareSimpleConstraints(SingleConstraint single,
SingleConstraint newSingle) {
@@ -20,6 +20,10 @@ package org.apache.hadoop.yarn.api;
import java.io.File;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
@@ -29,20 +33,19 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.proto.YarnProtos;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.TestResourceUtils;
-import org.junit.After;
-import org.junit.Before;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
* Test class to handle various proto related tests for resources.
public class TestResourcePBImpl {
- @Before
+ @BeforeEach
public void setup() throws Exception {
ResourceUtils.resetResourceTypes();
@@ -51,7 +54,7 @@ public class TestResourcePBImpl {
TestResourceUtils.setupResourceTypes(conf, resourceTypesFile);
- @After
+ @AfterEach
public void teardown() {
Configuration conf = new YarnConfiguration();
File source = new File(
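
The lifecycle annotations are renamed but keep their per-test-method semantics: org.junit.Before becomes org.junit.jupiter.api.BeforeEach, and org.junit.After becomes AfterEach. A self-contained sketch with a hypothetical fixture:

    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    public class LifecycleSketch {
      private StringBuilder fixture;  // hypothetical fixture object

      @BeforeEach  // JUnit 4: @Before; runs before every @Test method
      public void setup() {
        fixture = new StringBuilder("ready");
      }

      @AfterEach  // JUnit 4: @After; runs after every @Test method
      public void teardown() {
        fixture = null;
      }

      @Test
      void fixtureIsFresh() {
        assertEquals("ready", fixture.toString());
      }
    }
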
@@ -63,80 +66,80 @@ public class TestResourcePBImpl {
- public void testEmptyResourcePBInit() throws Exception {
+ void testEmptyResourcePBInit() throws Exception {
Resource res = new ResourcePBImpl();
// Assert to check it sets resource value and unit to default.
- Assert.assertEquals(0, res.getMemorySize());
- Assert.assertEquals(ResourceInformation.MEMORY_MB.getUnits(),
+ assertEquals(0, res.getMemorySize());
+ assertEquals(ResourceInformation.MEMORY_MB.getUnits(),
res.getResourceInformation(ResourceInformation.MEMORY_MB.getName())
.getUnits());
- Assert.assertEquals(ResourceInformation.VCORES.getUnits(),
+ assertEquals(ResourceInformation.VCORES.getUnits(),
res.getResourceInformation(ResourceInformation.VCORES.getName())
- public void testResourcePBInitFromOldPB() throws Exception {
+ void testResourcePBInitFromOldPB() throws Exception {
YarnProtos.ResourceProto proto =
YarnProtos.ResourceProto.newBuilder().setMemory(1024).setVirtualCores(3)
.build();
Resource res = new ResourcePBImpl(proto);
- Assert.assertEquals(1024, res.getMemorySize());
- Assert.assertEquals(3, res.getVirtualCores());
+ assertEquals(1024, res.getMemorySize());
+ assertEquals(3, res.getVirtualCores());
@SuppressWarnings("deprecation")
- public void testGetMemory() {
+ void testGetMemory() {
long memorySize = Integer.MAX_VALUE + 1L;
res.setMemorySize(memorySize);
- assertEquals("No need to cast if both are long", memorySize,
- res.getMemorySize());
- assertEquals("Cast to Integer.MAX_VALUE if the long is greater than "
- + "Integer.MAX_VALUE", Integer.MAX_VALUE, res.getMemory());
+ assertEquals(memorySize, res.getMemorySize(), "No need to cast if both are long");
+ assertEquals(Integer.MAX_VALUE, res.getMemory(),
+ "Cast to Integer.MAX_VALUE if the long is greater than " + "Integer.MAX_VALUE");
- public void testGetVirtualCores() {
+ void testGetVirtualCores() {
long vcores = Integer.MAX_VALUE + 1L;
res.getResourceInformation("vcores").setValue(vcores);
- assertEquals("No need to cast if both are long", vcores,
- res.getResourceInformation("vcores").getValue());
- + "Integer.MAX_VALUE", Integer.MAX_VALUE, res.getVirtualCores());
+ assertEquals(vcores,
+ res.getResourceInformation("vcores").getValue(),
+ "No need to cast if both are long");
+ assertEquals(Integer.MAX_VALUE, res.getVirtualCores(),
- public void testResourcePBWithExtraResources() throws Exception {
+ void testResourcePBWithExtraResources() throws Exception {
//Resource 'resource1' has been passed as 4T
//4T should be converted to 4000G
YarnProtos.ResourceInformationProto riProto =
YarnProtos.ResourceInformationProto.newBuilder().setType(
YarnProtos.ResourceTypeInfoProto.newBuilder().
- setName("resource1").setType(
+ setName("resource1").setType(
YarnProtos.ResourceTypesProto.COUNTABLE).getType()).
- setValue(4).setUnits("T").setKey("resource1").build();
+ setValue(4).setUnits("T").setKey("resource1").build();
YarnProtos.ResourceProto.newBuilder().setMemory(1024).
- setVirtualCores(3).addResourceValueMap(riProto).build();
+ setVirtualCores(3).addResourceValueMap(riProto).build();
- Assert.assertEquals(4000,
+ assertEquals(4000,
res.getResourceInformation("resource1").getValue());
- Assert.assertEquals("G",
+ assertEquals("G",
res.getResourceInformation("resource1").getUnits());
//Resource 'resource2' has been passed as 4M
@@ -144,18 +147,18 @@ public class TestResourcePBImpl {
YarnProtos.ResourceInformationProto riProto1 =
- setName("resource2").setType(
+ setName("resource2").setType(
- setValue(4).setUnits("M").setKey("resource2").build();
+ setValue(4).setUnits("M").setKey("resource2").build();
YarnProtos.ResourceProto proto1 =
- setVirtualCores(3).addResourceValueMap(riProto1).build();
+ setVirtualCores(3).addResourceValueMap(riProto1).build();
Resource res1 = new ResourcePBImpl(proto1);
- Assert.assertEquals(4000000000L,
+ assertEquals(4000000000L,
res1.getResourceInformation("resource2").getValue());
- Assert.assertEquals("m",
+ assertEquals("m",
res1.getResourceInformation("resource2").getUnits());
//Resource 'resource1' has been passed as 3M
@@ -163,23 +166,23 @@ public class TestResourcePBImpl {
YarnProtos.ResourceInformationProto riProto2 =
- setValue(3).setUnits("M").setKey("resource1").build();
+ setValue(3).setUnits("M").setKey("resource1").build();
YarnProtos.ResourceProto proto2 =
- setVirtualCores(3).addResourceValueMap(riProto2).build();
+ setVirtualCores(3).addResourceValueMap(riProto2).build();
Resource res2 = new ResourcePBImpl(proto2);
- Assert.assertEquals(0,
+ assertEquals(0,
res2.getResourceInformation("resource1").getValue());
res2.getResourceInformation("resource1").getUnits());
- public void testResourceTags() {
+ void testResourceTags() {
YarnProtos.ResourceInformationProto.newBuilder()
.setType(
@@ -201,19 +204,19 @@ public class TestResourcePBImpl {
- Assert.assertNotNull(res.getResourceInformation("yarn.io/test-volume"));
- Assert.assertEquals(10,
+ assertNotNull(res.getResourceInformation("yarn.io/test-volume"));
+ assertEquals(10,
res.getResourceInformation("yarn.io/test-volume")
.getValue());
- Assert.assertEquals(3,
+ assertEquals(3,
.getTags().size());
- Assert.assertFalse(res.getResourceInformation("yarn.io/test-volume")
+ assertFalse(res.getResourceInformation("yarn.io/test-volume")
.getTags().isEmpty());
- Assert.assertTrue(res.getResourceInformation("yarn.io/test-volume")
+ assertTrue(res.getResourceInformation("yarn.io/test-volume")
.getAttributes().isEmpty());
boolean protoConvertExpected = false;
@@ -225,13 +228,13 @@ public class TestResourcePBImpl {
&& pf.getTagsCount() == 3;
- Assert.assertTrue("Expecting resource's protobuf message"
- + " contains 0 attributes and 3 tags",
- protoConvertExpected);
+ assertTrue(protoConvertExpected,
+ "Expecting resource's protobuf message"
+ + " contains 0 attributes and 3 tags");
- public void testResourceAttributes() {
+ void testResourceAttributes() {
@@ -260,19 +263,19 @@ public class TestResourcePBImpl {
- Assert.assertEquals(2,
+ assertEquals(2,
.getAttributes().size());
@@ -284,20 +287,20 @@ public class TestResourcePBImpl {
&& pf.getTagsCount() == 0;
- + " contains 2 attributes and 0 tags",
+ + " contains 2 attributes and 0 tags");
- public void testParsingResourceTags() {
+ void testParsingResourceTags() {
ResourceInformation info =
ResourceUtils.getResourceTypes().get("resource3");
- Assert.assertTrue(info.getAttributes().isEmpty());
- Assert.assertFalse(info.getTags().isEmpty());
+ assertTrue(info.getAttributes().isEmpty());
+ assertFalse(info.getTags().isEmpty());
assertThat(info.getTags()).hasSize(2);
info.getTags().remove("resource3_tag_1");
info.getTags().remove("resource3_tag_2");
- Assert.assertTrue(info.getTags().isEmpty());
+ assertTrue(info.getTags().isEmpty());
@@ -17,13 +17,15 @@
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.api.records.ExecutionTypeRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
* The class to test {@link ResourceRequest}.
@@ -31,7 +33,7 @@ import org.junit.Test;
public class TestResourceRequest {
- public void testEqualsOnExecutionTypeRequest() {
+ void testEqualsOnExecutionTypeRequest() {
ResourceRequest resourceRequestA =
ResourceRequest.newInstance(Priority.newInstance(0), "localhost",
Resource.newInstance(1024, 1), 1, false, "",
@@ -42,6 +44,6 @@ public class TestResourceRequest {
ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED, false));
- Assert.assertFalse(resourceRequestA.equals(resourceRequestB));
+ assertNotEquals(resourceRequestA, resourceRequestB);
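
assertNotEquals replaces the older assertFalse(a.equals(b)) idiom; on failure it reports the offending value rather than just "expected false". A minimal sketch:

    import static org.junit.jupiter.api.Assertions.assertNotEquals;

    public class NotEqualsSketch {
      @org.junit.jupiter.api.Test
      void reportsValueOnFailure() {
        String a = "GUARANTEED";       // hypothetical stand-ins for the two requests
        String b = "OPPORTUNISTIC";
        assertNotEquals(a, b);         // JUnit 4 style was: assertFalse(a.equals(b))
      }
    }
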
@@ -18,15 +18,20 @@
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
public class TestTimelineEntityGroupId {
- public void testTimelineEntityGroupId() {
+ void testTimelineEntityGroupId() {
ApplicationId appId1 = ApplicationId.newInstance(1234, 1);
ApplicationId appId2 = ApplicationId.newInstance(1234, 2);
TimelineEntityGroupId group1 = TimelineEntityGroupId.newInstance(appId1, "1");
@@ -34,19 +39,19 @@ public class TestTimelineEntityGroupId {
TimelineEntityGroupId group3 = TimelineEntityGroupId.newInstance(appId2, "1");
TimelineEntityGroupId group4 = TimelineEntityGroupId.newInstance(appId1, "1");
- Assert.assertTrue(group1.equals(group4));
- Assert.assertFalse(group1.equals(group2));
- Assert.assertFalse(group1.equals(group3));
+ assertEquals(group1, group4);
+ assertNotEquals(group1, group2);
+ assertNotEquals(group1, group3);
- Assert.assertTrue(group1.compareTo(group4) == 0);
- Assert.assertTrue(group1.compareTo(group2) < 0);
- Assert.assertTrue(group1.compareTo(group3) < 0);
+ assertTrue(group1.compareTo(group4) == 0);
+ assertTrue(group1.compareTo(group2) < 0);
+ assertTrue(group1.compareTo(group3) < 0);
- Assert.assertTrue(group1.hashCode() == group4.hashCode());
- Assert.assertFalse(group1.hashCode() == group2.hashCode());
- Assert.assertFalse(group1.hashCode() == group3.hashCode());
+ assertTrue(group1.hashCode() == group4.hashCode());
+ assertFalse(group1.hashCode() == group2.hashCode());
+ assertFalse(group1.hashCode() == group3.hashCode());
- Assert.assertEquals("timelineEntityGroupId_1234_1_1", group1.toString());
- Assert.assertEquals(TimelineEntityGroupId.fromString("timelineEntityGroupId_1234_1_1"), group1);
+ assertEquals("timelineEntityGroupId_1234_1_1", group1.toString());
+ assertEquals(TimelineEntityGroupId.fromString("timelineEntityGroupId_1234_1_1"), group1);
@@ -18,57 +18,64 @@
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
-import static org.junit.Assert.assertNotEquals;
import java.util.Collection;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
import org.apache.hadoop.util.Sets;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-@RunWith(Parameterized.class)
public class TestGetApplicationsRequestPBImpl {
- @Parameter
@SuppressWarnings("checkstyle:visibilitymodifier")
public GetApplicationsRequestPBImpl impl;
- public void testAppTagsLowerCaseConversionDefault() {
- impl.setApplicationTags(Sets.newHashSet("ABcd", "efgH"));
- impl.getApplicationTags().forEach(s ->
- assertEquals(s, s.toLowerCase()));
+ @MethodSource("data")
+ @ParameterizedTest
+ void testAppTagsLowerCaseConversionDefault(
+ GetApplicationsRequestPBImpl applicationsRequestPBImpl) {
+ initTestGetApplicationsRequestPBImpl(applicationsRequestPBImpl);
+ applicationsRequestPBImpl.setApplicationTags(Sets.newHashSet("ABcd", "efgH"));
+ applicationsRequestPBImpl.getApplicationTags().forEach(s -> assertEquals(s, s.toLowerCase()));
- public void testAppTagsLowerCaseConversionDisabled() {
+ void testAppTagsLowerCaseConversionDisabled(
GetApplicationsRequestPBImpl.setForceLowerCaseTags(false);
- assertNotEquals(s, s.toLowerCase()));
+ applicationsRequestPBImpl.getApplicationTags()
+ .forEach(s -> assertNotEquals(s, s.toLowerCase()));
- public void testAppTagsLowerCaseConversionEnabled() {
+ void testAppTagsLowerCaseConversionEnabled(
GetApplicationsRequestPBImpl.setForceLowerCaseTags(true);
- @Parameters
public static Collection<Object[]> data() {
List<Object[]> list = new ArrayList<>();
- list.add(new Object[] {new GetApplicationsRequestPBImpl()});
- list.add(new Object[] {new GetApplicationsRequestPBImpl(
+ list.add(new Object[]{new GetApplicationsRequestPBImpl()});
+ list.add(new Object[]{new GetApplicationsRequestPBImpl(
GetApplicationsRequestProto.newBuilder().build())});
return list;
+ public void initTestGetApplicationsRequestPBImpl(
+ this.impl = applicationsRequestPBImpl;
+ }
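
The conversion above replaces the JUnit 4 class-level runner (@RunWith(Parameterized.class) with an @Parameters provider and an injected @Parameter field) by method-level @ParameterizedTest plus @MethodSource, where each test receives the parameter as an argument; this requires the junit-jupiter-params artifact. A self-contained sketch of the shape:

    import java.util.stream.Stream;

    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    public class ParameterizedSketch {
      // Jupiter provider: a static method returning a Stream (or Collection) of arguments.
      public static Stream<String> data() {
        return Stream.of("ABcd", "efgH");
      }

      @ParameterizedTest
      @MethodSource("data")  // the test method runs once per provided value
      void tagHasExpectedLength(String tag) {
        assertEquals(4, tag.length());
      }
    }
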
@@ -18,54 +18,58 @@
package org.apache.hadoop.yarn.api.records;
import java.util.HashMap;
import java.util.Map;
public class TestResourceUtilization {
- public void testResourceUtilization() {
+ void testResourceUtilization() {
ResourceUtilization u1 = ResourceUtilization.newInstance(10, 20, 0.5f);
ResourceUtilization u2 = ResourceUtilization.newInstance(u1);
ResourceUtilization u3 = ResourceUtilization.newInstance(10, 20, 0.5f);
ResourceUtilization u4 = ResourceUtilization.newInstance(20, 20, 0.5f);
ResourceUtilization u5 = ResourceUtilization.newInstance(30, 40, 0.8f);
- Assert.assertEquals(u1, u2);
- Assert.assertEquals(u1, u3);
- Assert.assertNotEquals(u1, u4);
- Assert.assertNotEquals(u2, u5);
- Assert.assertNotEquals(u4, u5);
+ assertEquals(u1, u2);
+ assertEquals(u1, u3);
+ assertNotEquals(u1, u4);
+ assertNotEquals(u2, u5);
+ assertNotEquals(u4, u5);
- Assert.assertTrue(u1.hashCode() == u2.hashCode());
- Assert.assertTrue(u1.hashCode() == u3.hashCode());
- Assert.assertFalse(u1.hashCode() == u4.hashCode());
- Assert.assertFalse(u2.hashCode() == u5.hashCode());
- Assert.assertFalse(u4.hashCode() == u5.hashCode());
+ assertTrue(u1.hashCode() == u2.hashCode());
+ assertTrue(u1.hashCode() == u3.hashCode());
+ assertFalse(u1.hashCode() == u4.hashCode());
+ assertFalse(u2.hashCode() == u5.hashCode());
+ assertFalse(u4.hashCode() == u5.hashCode());
- Assert.assertTrue(u1.getPhysicalMemory() == 10);
- Assert.assertFalse(u1.getVirtualMemory() == 10);
- Assert.assertTrue(u1.getCPU() == 0.5f);
+ assertTrue(u1.getPhysicalMemory() == 10);
+ assertFalse(u1.getVirtualMemory() == 10);
+ assertTrue(u1.getCPU() == 0.5f);
- Assert.assertEquals("<pmem:10, vmem:" + u1.getVirtualMemory()
+ assertEquals("<pmem:10, vmem:" + u1.getVirtualMemory()
+ ", vCores:0.5>", u1.toString());
u1.addTo(10, 0, 0.0f);
- Assert.assertNotEquals(u1, u2);
- Assert.assertEquals(u1, u4);
+ assertNotEquals(u1, u2);
+ assertEquals(u1, u4);
u1.addTo(10, 20, 0.3f);
- Assert.assertEquals(u1, u5);
+ assertEquals(u1, u5);
u1.subtractFrom(10, 20, 0.3f);
u1.subtractFrom(10, 0, 0.0f);
- public void testResourceUtilizationWithCustomResource() {
+ void testResourceUtilizationWithCustomResource() {
Map<String, Float> customResources = new HashMap<>();
customResources.put(ResourceInformation.GPU_URI, 5.0f);
ResourceUtilization u1 = ResourceUtilization.
@@ -78,35 +82,35 @@ public class TestResourceUtilization {
ResourceUtilization u5 = ResourceUtilization.
newInstance(30, 40, 0.8f, customResources);
- Assert.assertTrue(u1.
+ assertTrue(u1.
getCustomResource(ResourceInformation.GPU_URI) == 5.0f);
+ ", vCores:0.5, yarn.io/gpu:5.0>", u1.toString());
@@ -25,6 +25,8 @@ import java.util.HashMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
@@ -36,8 +38,10 @@ import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
public class TestApplicationClientProtocolRecords {
@@ -47,7 +51,7 @@ public class TestApplicationClientProtocolRecords {
*
- public void testCLCPBImplNullEnv() throws IOException {
+ void testCLCPBImplNullEnv() throws IOException {
Map<String, LocalResource> localResources = Collections.emptyMap();
Map<String, String> environment = new HashMap<String, String>();
List<String> commands = Collections.emptyList();
@@ -68,7 +72,7 @@ public class TestApplicationClientProtocolRecords {
ContainerLaunchContext clcProto = new ContainerLaunchContextPBImpl(
((ContainerLaunchContextPBImpl) clc).getProto());
- Assert.assertEquals("",
+ assertEquals("",
clcProto.getEnvironment().get("testCLCPBImplNullEnv"));
@@ -78,7 +82,7 @@ public class TestApplicationClientProtocolRecords {
* local resource URL.
- public void testCLCPBImplNullResourceURL() throws IOException {
+ void testCLCPBImplNullResourceURL() throws IOException {
RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class);
@@ -92,9 +96,9 @@ public class TestApplicationClientProtocolRecords {
localResources.put("null_url_resource", rsrc_alpha);
ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class);
containerLaunchContext.setLocalResources(localResources);
- Assert.fail("Setting an invalid local resource should be an error!");
+ fail("Setting an invalid local resource should be an error!");
} catch (NullPointerException e) {
- Assert.assertTrue(e.getMessage().contains("Null resource URL for local resource"));
+ assertTrue(e.getMessage().contains("Null resource URL for local resource"));
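
The patch keeps the JUnit 4-era try { ... fail(...); } catch pattern and only swaps the static imports. Jupiter's assertThrows (not used here) is the idiomatic alternative; it returns the thrown exception so the message can still be inspected. A hedged sketch using a standard-library call in place of the YARN record setter:

    import java.util.Objects;

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class AssertThrowsSketch {
      @org.junit.jupiter.api.Test
      void nullResourceUrlIsRejected() {
        NullPointerException e = assertThrows(NullPointerException.class,
            () -> Objects.requireNonNull(null, "Null resource URL for local resource"));
        assertTrue(e.getMessage().contains("Null resource URL"));
      }
    }
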
@@ -103,7 +107,7 @@ public class TestApplicationClientProtocolRecords {
* local resource type.
- public void testCLCPBImplNullResourceType() throws IOException {
+ void testCLCPBImplNullResourceType() throws IOException {
LocalResource resource = recordFactory.newRecordInstance(LocalResource.class);
@@ -117,9 +121,9 @@ public class TestApplicationClientProtocolRecords {
localResources.put("null_type_resource", resource);
- Assert.assertTrue(e.getMessage().contains("Null resource type for local resource"));
+ assertTrue(e.getMessage().contains("Null resource type for local resource"));
@@ -128,7 +132,7 @@ public class TestApplicationClientProtocolRecords {
- public void testCLCPBImplNullResourceVisibility() throws IOException {
+ void testCLCPBImplNullResourceVisibility() throws IOException {
@@ -142,9 +146,9 @@ public class TestApplicationClientProtocolRecords {
localResources.put("null_visibility_resource", resource);
- Assert.assertTrue(e.getMessage().contains("Null resource visibility for local resource"));
+ assertTrue(e.getMessage().contains("Null resource visibility for local resource"));
@@ -18,57 +18,66 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto;
public class TestApplicationSubmissionContextPBImpl {
public ApplicationSubmissionContextPBImpl impl;
+ ApplicationSubmissionContextPBImpl applicationSubmissionContextPB) {
+ initTestApplicationSubmissionContextPBImpl(applicationSubmissionContextPB);
+ applicationSubmissionContextPB.setApplicationTags(Sets.newHashSet("ABcd", "efgH"));
+ applicationSubmissionContextPB.getApplicationTags()
+ .forEach(s -> assertEquals(s, s.toLowerCase()));
ApplicationSubmissionContextPBImpl.setForceLowerCaseTags(false);
ApplicationSubmissionContextPBImpl.setForceLowerCaseTags(true);
- list.add(new Object[] {new ApplicationSubmissionContextPBImpl()});
- list.add(new Object[] {new ApplicationSubmissionContextPBImpl(
+ list.add(new Object[]{new ApplicationSubmissionContextPBImpl()});
+ list.add(new Object[]{new ApplicationSubmissionContextPBImpl(
ApplicationSubmissionContextProto.newBuilder().build())});
+ public void initTestApplicationSubmissionContextPBImpl(
+ this.impl = applicationSubmissionContextPB;
@@ -17,20 +17,21 @@
-import static org.junit.Assert.*;
import java.util.stream.Stream;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerSubState;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto;
public class TestProtoUtils {
- public void testConvertFromOrToProtoFormat() {
+ void testConvertFromOrToProtoFormat() {
// Check if utility has all enum values
Stream.of(ContainerState.values())
@@ -20,71 +20,75 @@ package org.apache.hadoop.yarn.api.records.impl.pb;
import java.nio.channels.ClosedChannelException;
import org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto;
public class TestSerializedExceptionPBImpl {
- public void testSerializedException() throws Exception {
+ void testSerializedException() throws Exception {
SerializedExceptionPBImpl orig = new SerializedExceptionPBImpl();
orig.init(new Exception("test exception"));
SerializedExceptionProto proto = orig.getProto();
SerializedExceptionPBImpl deser = new SerializedExceptionPBImpl(proto);
- Assert.assertEquals(orig, deser);
- Assert.assertEquals(orig.getMessage(), deser.getMessage());
- Assert.assertEquals(orig.getRemoteTrace(), deser.getRemoteTrace());
- Assert.assertEquals(orig.getCause(), deser.getCause());
+ assertEquals(orig, deser);
+ assertEquals(orig.getMessage(), deser.getMessage());
+ assertEquals(orig.getRemoteTrace(), deser.getRemoteTrace());
+ assertEquals(orig.getCause(), deser.getCause());
- public void testDeserialize() throws Exception {
+ void testDeserialize() throws Exception {
Exception ex = new Exception("test exception");
SerializedExceptionPBImpl pb = new SerializedExceptionPBImpl();
pb.deSerialize();
- Assert.fail("deSerialze should throw YarnRuntimeException");
+ fail("deSerialze should throw YarnRuntimeException");
- Assert.assertEquals(ClassNotFoundException.class,
+ assertEquals(ClassNotFoundException.class,
e.getCause().getClass());
pb.init(ex);
- Assert.assertEquals(ex.toString(), pb.deSerialize().toString());
+ assertEquals(ex.toString(), pb.deSerialize().toString());
- public void testDeserializeWithDefaultConstructor() {
+ void testDeserializeWithDefaultConstructor() {
// Init SerializedException with an Exception with default constructor.
ClosedChannelException ex = new ClosedChannelException();
- Assert.assertEquals(ex.getClass(), pb.deSerialize().getClass());
+ assertEquals(ex.getClass(), pb.deSerialize().getClass());
- public void testBeforeInit() throws Exception {
+ void testBeforeInit() throws Exception {
SerializedExceptionProto defaultProto =
SerializedExceptionProto.newBuilder().build();
SerializedExceptionPBImpl pb1 = new SerializedExceptionPBImpl();
- Assert.assertNull(pb1.getCause());
+ assertNull(pb1.getCause());
SerializedExceptionPBImpl pb2 = new SerializedExceptionPBImpl();
- Assert.assertEquals(defaultProto, pb2.getProto());
+ assertEquals(defaultProto, pb2.getProto());
SerializedExceptionPBImpl pb3 = new SerializedExceptionPBImpl();
- Assert.assertEquals(defaultProto.getTrace(), pb3.getRemoteTrace());
+ assertEquals(defaultProto.getTrace(), pb3.getRemoteTrace());
- public void testThrowableDeserialization() {
+ void testThrowableDeserialization() {
// java.lang.Error should also be serializable
Error ex = new Error();
@@ -27,12 +27,16 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.WeakHashMap;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
public class TestTimelineRecords {
@@ -40,7 +44,7 @@ public class TestTimelineRecords {
LoggerFactory.getLogger(TestTimelineRecords.class);
- public void testEntities() throws Exception {
+ void testEntities() throws Exception {
TimelineEntities entities = new TimelineEntities();
for (int j = 0; j < 2; ++j) {
TimelineEntity entity = new TimelineEntity();
@@ -67,27 +71,27 @@ public class TestTimelineRecords {
LOG.info("Entities in JSON:");
LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));
- Assert.assertEquals(2, entities.getEntities().size());
+ assertEquals(2, entities.getEntities().size());
TimelineEntity entity1 = entities.getEntities().get(0);
- Assert.assertEquals("entity id 0", entity1.getEntityId());
- Assert.assertEquals("entity type 0", entity1.getEntityType());
- Assert.assertEquals(2, entity1.getRelatedEntities().size());
- Assert.assertEquals(2, entity1.getEvents().size());
- Assert.assertEquals(2, entity1.getPrimaryFilters().size());
- Assert.assertEquals(2, entity1.getOtherInfo().size());
- Assert.assertEquals("domain id 0", entity1.getDomainId());
+ assertEquals("entity id 0", entity1.getEntityId());
+ assertEquals("entity type 0", entity1.getEntityType());
+ assertEquals(2, entity1.getRelatedEntities().size());
+ assertEquals(2, entity1.getEvents().size());
+ assertEquals(2, entity1.getPrimaryFilters().size());
+ assertEquals(2, entity1.getOtherInfo().size());
+ assertEquals("domain id 0", entity1.getDomainId());
TimelineEntity entity2 = entities.getEntities().get(1);
- Assert.assertEquals("entity id 1", entity2.getEntityId());
- Assert.assertEquals("entity type 1", entity2.getEntityType());
- Assert.assertEquals(2, entity2.getRelatedEntities().size());
- Assert.assertEquals(2, entity2.getEvents().size());
- Assert.assertEquals(2, entity2.getPrimaryFilters().size());
- Assert.assertEquals(2, entity2.getOtherInfo().size());
- Assert.assertEquals("domain id 1", entity2.getDomainId());
+ assertEquals("entity id 1", entity2.getEntityId());
+ assertEquals("entity type 1", entity2.getEntityType());
+ assertEquals(2, entity2.getRelatedEntities().size());
+ assertEquals(2, entity2.getEvents().size());
+ assertEquals(2, entity2.getPrimaryFilters().size());
+ assertEquals(2, entity2.getOtherInfo().size());
+ assertEquals("domain id 1", entity2.getDomainId());
- public void testEvents() throws Exception {
+ void testEvents() throws Exception {
TimelineEvents events = new TimelineEvents();
TimelineEvents.EventsOfOneEntity partEvents =
@@ -107,31 +111,31 @@ public class TestTimelineRecords {
LOG.info("Events in JSON:");
LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true));
- Assert.assertEquals(2, events.getAllEvents().size());
+ assertEquals(2, events.getAllEvents().size());
TimelineEvents.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0);
- Assert.assertEquals("entity id 0", partEvents1.getEntityId());
- Assert.assertEquals("entity type 0", partEvents1.getEntityType());
- Assert.assertEquals(2, partEvents1.getEvents().size());
+ assertEquals("entity id 0", partEvents1.getEntityId());
+ assertEquals("entity type 0", partEvents1.getEntityType());
+ assertEquals(2, partEvents1.getEvents().size());
TimelineEvent event11 = partEvents1.getEvents().get(0);
- Assert.assertEquals("event type 0", event11.getEventType());
- Assert.assertEquals(2, event11.getEventInfo().size());
+ assertEquals("event type 0", event11.getEventType());
+ assertEquals(2, event11.getEventInfo().size());
TimelineEvent event12 = partEvents1.getEvents().get(1);
- Assert.assertEquals("event type 1", event12.getEventType());
- Assert.assertEquals(2, event12.getEventInfo().size());
+ assertEquals("event type 1", event12.getEventType());
+ assertEquals(2, event12.getEventInfo().size());
TimelineEvents.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1);
- Assert.assertEquals("entity id 1", partEvents2.getEntityId());
- Assert.assertEquals("entity type 1", partEvents2.getEntityType());
- Assert.assertEquals(2, partEvents2.getEvents().size());
+ assertEquals("entity id 1", partEvents2.getEntityId());
+ assertEquals("entity type 1", partEvents2.getEntityType());
+ assertEquals(2, partEvents2.getEvents().size());
TimelineEvent event21 = partEvents2.getEvents().get(0);
- Assert.assertEquals("event type 0", event21.getEventType());
- Assert.assertEquals(2, event21.getEventInfo().size());
+ assertEquals("event type 0", event21.getEventType());
+ assertEquals(2, event21.getEventInfo().size());
TimelineEvent event22 = partEvents2.getEvents().get(1);
- Assert.assertEquals("event type 1", event22.getEventType());
- Assert.assertEquals(2, event22.getEventInfo().size());
+ assertEquals("event type 1", event22.getEventType());
+ assertEquals(2, event22.getEventInfo().size());
- public void testTimelinePutErrors() throws Exception {
+ void testTimelinePutErrors() throws Exception {
TimelinePutResponse TimelinePutErrors = new TimelinePutResponse();
TimelinePutError error1 = new TimelinePutError();
error1.setEntityId("entity id 1");
@@ -149,23 +153,23 @@ public class TestTimelineRecords {
LOG.info("Errors in JSON:");
LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(TimelinePutErrors, true));
- Assert.assertEquals(3, TimelinePutErrors.getErrors().size());
+ assertEquals(3, TimelinePutErrors.getErrors().size());
TimelinePutError e = TimelinePutErrors.getErrors().get(0);
- Assert.assertEquals(error1.getEntityId(), e.getEntityId());
- Assert.assertEquals(error1.getEntityType(), e.getEntityType());
- Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
+ assertEquals(error1.getEntityId(), e.getEntityId());
+ assertEquals(error1.getEntityType(), e.getEntityType());
+ assertEquals(error1.getErrorCode(), e.getErrorCode());
e = TimelinePutErrors.getErrors().get(1);
e = TimelinePutErrors.getErrors().get(2);
- Assert.assertEquals(error2.getEntityId(), e.getEntityId());
- Assert.assertEquals(error2.getEntityType(), e.getEntityType());
- Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
+ assertEquals(error2.getEntityId(), e.getEntityId());
+ assertEquals(error2.getEntityType(), e.getEntityType());
+ assertEquals(error2.getErrorCode(), e.getErrorCode());
- public void testTimelineDomain() throws Exception {
+ void testTimelineDomain() throws Exception {
TimelineDomains domains = new TimelineDomains();
TimelineDomain domain = null;
@@ -185,25 +189,25 @@ public class TestTimelineRecords {
LOG.info("Domain in JSON:");
LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(domains, true));
- Assert.assertEquals(2, domains.getDomains().size());
+ assertEquals(2, domains.getDomains().size());
for (int i = 0; i < domains.getDomains().size(); ++i) {
domain = domains.getDomains().get(i);
- Assert.assertEquals("test id " + (i + 1), domain.getId());
- Assert.assertEquals("test description " + (i + 1),
+ assertEquals("test id " + (i + 1), domain.getId());
+ assertEquals("test description " + (i + 1),
domain.getDescription());
- Assert.assertEquals("test owner " + (i + 1), domain.getOwner());
- Assert.assertEquals("test_reader_user_" + (i + 1) +
+ assertEquals("test owner " + (i + 1), domain.getOwner());
+ assertEquals("test_reader_user_" + (i + 1) +
" test_reader_group+" + (i + 1), domain.getReaders());
- Assert.assertEquals("test_writer_user_" + (i + 1) +
+ assertEquals("test_writer_user_" + (i + 1) +
" test_writer_group+" + (i + 1), domain.getWriters());
- Assert.assertEquals(new Long(0L), domain.getCreatedTime());
- Assert.assertEquals(new Long(1L), domain.getModifiedTime());
+ assertEquals(Long.valueOf(0L), domain.getCreatedTime());
+ assertEquals(Long.valueOf(1L), domain.getModifiedTime());
- public void testMapInterfaceOrTimelineRecords() throws Exception {
+ void testMapInterfaceOrTimelineRecords() throws Exception {
List<Map<String, Set<Object>>> primaryFiltersList =
new ArrayList<Map<String, Set<Object>>>();
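
The new Long(0L) calls removed above rely on the boxed-primitive constructors deprecated since Java 9; Long.valueOf keeps the assertEquals(Object, Object) overload in play while going through the cached-instance factory. A small sketch:

    import static org.junit.jupiter.api.Assertions.assertEquals;

    public class BoxingSketch {
      @org.junit.jupiter.api.Test
      void boxedLongComparison() {
        Long createdTime = 0L;  // auto-boxing compiles to Long.valueOf(0L)
        // new Long(0L) is deprecated (Java 9+); valueOf may return a cached instance.
        assertEquals(Long.valueOf(0L), createdTime);
      }
    }
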
@@ -284,36 +288,36 @@ public class TestTimelineRecords {
private static void assertPrimaryFilters(TimelineEntity entity) {
- Assert.assertNotNull(entity.getPrimaryFilters());
- Assert.assertNotNull(entity.getPrimaryFiltersJAXB());
- Assert.assertTrue(entity.getPrimaryFilters() instanceof HashMap);
- Assert.assertTrue(entity.getPrimaryFiltersJAXB() instanceof HashMap);
+ assertNotNull(entity.getPrimaryFilters());
+ assertNotNull(entity.getPrimaryFiltersJAXB());
+ assertTrue(entity.getPrimaryFilters() instanceof HashMap);
+ assertTrue(entity.getPrimaryFiltersJAXB() instanceof HashMap);
entity.getPrimaryFilters(), entity.getPrimaryFiltersJAXB());
private static void assertRelatedEntities(TimelineEntity entity) {
- Assert.assertNotNull(entity.getRelatedEntities());
- Assert.assertNotNull(entity.getRelatedEntitiesJAXB());
- Assert.assertTrue(entity.getRelatedEntities() instanceof HashMap);
- Assert.assertTrue(entity.getRelatedEntitiesJAXB() instanceof HashMap);
+ assertNotNull(entity.getRelatedEntities());
+ assertNotNull(entity.getRelatedEntitiesJAXB());
+ assertTrue(entity.getRelatedEntities() instanceof HashMap);
+ assertTrue(entity.getRelatedEntitiesJAXB() instanceof HashMap);
entity.getRelatedEntities(), entity.getRelatedEntitiesJAXB());
private static void assertOtherInfo(TimelineEntity entity) {
- Assert.assertNotNull(entity.getOtherInfo());
- Assert.assertNotNull(entity.getOtherInfoJAXB());
- Assert.assertTrue(entity.getOtherInfo() instanceof HashMap);
- Assert.assertTrue(entity.getOtherInfoJAXB() instanceof HashMap);
- Assert.assertEquals(entity.getOtherInfo(), entity.getOtherInfoJAXB());
+ assertNotNull(entity.getOtherInfo());
+ assertNotNull(entity.getOtherInfoJAXB());
+ assertTrue(entity.getOtherInfo() instanceof HashMap);
+ assertTrue(entity.getOtherInfoJAXB() instanceof HashMap);
+ assertEquals(entity.getOtherInfo(), entity.getOtherInfoJAXB());
private static void assertEventInfo(TimelineEvent event) {
- Assert.assertNotNull(event);
- Assert.assertNotNull(event.getEventInfoJAXB());
- Assert.assertTrue(event.getEventInfo() instanceof HashMap);
- Assert.assertTrue(event.getEventInfoJAXB() instanceof HashMap);
- Assert.assertEquals(event.getEventInfo(), event.getEventInfoJAXB());
+ assertNotNull(event);
+ assertNotNull(event.getEventInfoJAXB());
+ assertTrue(event.getEventInfo() instanceof HashMap);
+ assertTrue(event.getEventInfoJAXB() instanceof HashMap);
+ assertEquals(event.getEventInfo(), event.getEventInfoJAXB());
@@ -17,22 +17,28 @@
package org.apache.hadoop.yarn.api.records.timelineservice;
+import java.util.Arrays;
+import java.util.Collections;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
public class TestTimelineServiceRecords {
@@ -40,7 +46,7 @@ public class TestTimelineServiceRecords {
LoggerFactory.getLogger(TestTimelineServiceRecords.class);
- public void testTimelineEntities() throws Exception {
+ void testTimelineEntities() throws Exception {
entity.setType("test type 1");
entity.setId("test id 1");
@@ -48,7 +54,7 @@ public class TestTimelineServiceRecords {
entity.addInfo("test info key 2",
Arrays.asList("test info value 2", "test info value 3"));
entity.addInfo("test info key 3", true);
- Assert.assertTrue(
+ assertTrue(
entity.getInfo().get("test info key 3") instanceof Boolean);
entity.addConfig("test config key 1", "test config value 1");
entity.addConfig("test config key 2", "test config value 2");
@@ -59,43 +65,43 @@ public class TestTimelineServiceRecords {
metric1.addValue(1L, 1.0F);
metric1.addValue(3L, 3.0D);
metric1.addValue(2L, 2);
- Assert.assertEquals(TimelineMetric.Type.TIME_SERIES, metric1.getType());
+ assertEquals(TimelineMetric.Type.TIME_SERIES, metric1.getType());
Iterator<Map.Entry<Long, Number>> itr =
metric1.getValues().entrySet().iterator();
Map.Entry<Long, Number> entry = itr.next();
- Assert.assertEquals(new Long(3L), entry.getKey());
- Assert.assertEquals(3.0D, entry.getValue());
+ assertEquals(Long.valueOf(3L), entry.getKey());
+ assertEquals(3.0D, entry.getValue());
entry = itr.next();
- Assert.assertEquals(new Long(2L), entry.getKey());
- Assert.assertEquals(2, entry.getValue());
+ assertEquals(Long.valueOf(2L), entry.getKey());
+ assertEquals(2, entry.getValue());
- Assert.assertEquals(new Long(1L), entry.getKey());
- Assert.assertEquals(1.0F, entry.getValue());
- Assert.assertFalse(itr.hasNext());
+ assertEquals(Long.valueOf(1L), entry.getKey());
+ assertEquals(1.0F, entry.getValue());
+ assertFalse(itr.hasNext());
entity.addMetric(metric1);
TimelineMetric metric2 =
new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
metric2.setId("test metric id 1");
metric2.addValue(3L, (short) 3);
- Assert.assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric2.getType());
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric2.getType());
metric2.getValues().values().iterator().next() instanceof Short);
Map<Long, Number> points = new HashMap<>();
points.put(4L, 4.0D);
points.put(5L, 5.0D);
metric2.setValues(points);
- Assert.fail();
+ fail();
} catch (IllegalArgumentException e) {
- Assert.assertTrue(e.getMessage().contains(
+ assertTrue(e.getMessage().contains(
"Values cannot contain more than one point in"));
metric2.addValues(points);
entity.addMetric(metric2);
@@ -104,9 +110,8 @@ public class TestTimelineServiceRecords {
metric3.setId("test metric id 1");
metric3.addValue(4L, (short) 4);
- Assert.assertEquals("metric3 should equal to metric2! ", metric3, metric2);
- Assert.assertNotEquals("metric1 should not equal to metric2! ",
- metric1, metric2);
+ assertEquals(metric3, metric2, "metric3 should equal to metric2! ");
+ assertNotEquals(metric1, metric2, "metric1 should not equal to metric2! ");
TimelineEvent event1 = new TimelineEvent();
event1.setId("test event id 1");
@@ -114,7 +119,7 @@ public class TestTimelineServiceRecords {
event1.addInfo("test info key 2",
event1.addInfo("test info key 3", true);
event1.getInfo().get("test info key 3") instanceof Boolean);
event1.setTimestamp(1L);
entity.addEvent(event1);
@@ -125,19 +130,17 @@ public class TestTimelineServiceRecords {
event2.addInfo("test info key 2",
event2.addInfo("test info key 3", true);
event2.getInfo().get("test info key 3") instanceof Boolean);
event2.setTimestamp(2L);
entity.addEvent(event2);
- Assert.assertFalse("event1 should not equal to event2! ",
- event1.equals(event2));
+ assertNotEquals(event1, event2);
TimelineEvent event3 = new TimelineEvent();
event3.setId("test event id 1");
event3.setTimestamp(1L);
- Assert.assertEquals("event1 should equal to event3! ", event3, event1);
- Assert.assertNotEquals("event1 should not equal to event2! ",
- event1, event2);
+ assertEquals(event3, event1, "event1 should equal to event3! ");
+ assertNotEquals(event1, event2, "event1 should not equal to event2! ");
entity.setCreatedTime(0L);
entity.addRelatesToEntity("test type 2", "test id 2");
@@ -153,25 +156,22 @@ public class TestTimelineServiceRecords {
entities.addEntity(entity2);
- Assert.assertFalse("entity 1 should not be valid without type and id",
- entity1.isValid());
+ assertFalse(entity1.isValid(),
+ "entity 1 should not be valid without type and id");
entity1.setId("test id 2");
entity1.setType("test type 2");
entity2.setId("test id 1");
entity2.setType("test type 1");
- Assert.assertEquals("Timeline entity should equal to entity2! ",
- entity, entity2);
- Assert.assertNotEquals("entity1 should not equal to entity! ",
- entity1, entity);
- Assert.assertEquals("entity should be less than entity1! ",
- entity1.compareTo(entity), 1);
- Assert.assertEquals("entity's hash code should be -28727840 but not "
- + entity.hashCode(), entity.hashCode(), -28727840);
+ assertEquals(entity, entity2, "Timeline entity should equal to entity2! ");
+ assertNotEquals(entity1, entity, "entity1 should not equal to entity! ");
+ assertEquals(entity1.compareTo(entity), 1, "entity should be less than entity1! ");
+ assertEquals(entity.hashCode(), -28727840, "entity's hash code should be -28727840 but not "
+ + entity.hashCode());
- public void testFirstClassCitizenEntities() throws Exception {
+ void testFirstClassCitizenEntities() throws Exception {
UserEntity user = new UserEntity();
user.setId("test user id");
@@ -245,49 +245,49 @@ public class TestTimelineServiceRecords {
// Check parent/children APIs
- Assert.assertNotNull(app1.getParent());
- Assert.assertEquals(flow2.getType(), app1.getParent().getType());
- Assert.assertEquals(flow2.getId(), app1.getParent().getId());
+ assertNotNull(app1.getParent());
+ assertEquals(flow2.getType(), app1.getParent().getType());
+ assertEquals(flow2.getId(), app1.getParent().getId());
app1.addInfo(ApplicationEntity.PARENT_INFO_KEY, "invalid parent object");
app1.getParent();
- Assert.assertTrue(e instanceof YarnRuntimeException);
+ assertTrue(e instanceof YarnRuntimeException);
"Parent info is invalid identifier object"));
- Assert.assertNotNull(app1.getChildren());
- Assert.assertEquals(1, app1.getChildren().size());
+ assertNotNull(app1.getChildren());
+ assertEquals(1, app1.getChildren().size());
appAttempt.getType(), app1.getChildren().iterator().next().getType());
appAttempt.getId(), app1.getChildren().iterator().next().getId());
app1.addInfo(ApplicationEntity.CHILDREN_INFO_KEY,
Collections.singletonList("invalid children set"));
app1.getChildren();
"Children info is invalid identifier set"));
Collections.singleton("invalid child object"));
"Children info contains invalid identifier object"));
- public void testUser() throws Exception {
+ void testUser() throws Exception {
user.addInfo("test info key 1", "test info value 1");
@@ -296,7 +296,7 @@ public class TestTimelineServiceRecords {
- public void testQueue() throws Exception {
+ void testQueue() throws Exception {
QueueEntity queue = new QueueEntity();
queue.setId("test queue id");
queue.addInfo("test info key 1", "test info value 1");
@@ -18,18 +18,12 @@
package org.apache.hadoop.yarn.api.resource;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint.AbstractConstraint;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint.CardinalityConstraint;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint.Or;
@@ -39,8 +33,16 @@ import org.apache.hadoop.yarn.api.resource.PlacementConstraint.TargetConstraint.
import org.apache.hadoop.yarn.api.resource.PlacementConstraintTransformations.SingleConstraintTransformer;
import org.apache.hadoop.yarn.api.resource.PlacementConstraintTransformations.SpecializedConstraintTransformer;
import org.apache.hadoop.yarn.api.resource.PlacementConstraints.PlacementTargets;
* Test class for {@link PlacementConstraintTransformations}.
@@ -48,10 +50,10 @@ import org.junit.Test;
public class TestPlacementConstraintTransformations {
- public void testTargetConstraint() {
+ void testTargetConstraint() {
@@ -61,17 +63,17 @@ public class TestPlacementConstraintTransformations {
PlacementConstraint tConstraint = specTransformer.transform();
AbstractConstraint tConstraintExpr = tConstraint.getConstraintExpr();
- Assert.assertTrue(tConstraintExpr instanceof TargetConstraint);
+ assertTrue(tConstraintExpr instanceof TargetConstraint);
TargetConstraint target = (TargetConstraint) tConstraintExpr;
// Make sure the expression string is consistent
// before and after transforming
- Assert.assertEquals(single.toString(), target.toString());
- Assert.assertEquals(single.getScope(), target.getScope());
- Assert.assertEquals(TargetOperator.IN, target.getOp());
+ assertEquals(single.toString(), target.toString());
+ assertEquals(single.getScope(), target.getScope());
+ assertEquals(TargetOperator.IN, target.getOp());
target.getTargetExpressions());
// Transform from specialized TargetConstraint to SimpleConstraint
@@ -80,18 +82,18 @@ public class TestPlacementConstraintTransformations {
sConstraint = singleTransformer.transform();
sConstraintExpr = sConstraint.getConstraintExpr();
single = (SingleConstraint) sConstraintExpr;
- Assert.assertEquals(target.getScope(), single.getScope());
- Assert.assertEquals(1, single.getMinCardinality());
- Assert.assertEquals(Integer.MAX_VALUE, single.getMaxCardinality());
+ assertEquals(target.getScope(), single.getScope());
+ assertEquals(1, single.getMinCardinality());
+ assertEquals(Integer.MAX_VALUE, single.getMaxCardinality());
- public void testCardinalityConstraint() {
+ void testCardinalityConstraint() {
CardinalityConstraint cardinality = new CardinalityConstraint(RACK, 3, 10,
new HashSet<>(Arrays.asList("hb")));
PlacementConstraint cConstraint = PlacementConstraints.build(cardinality);
@@ -102,27 +104,27 @@ public class TestPlacementConstraintTransformations {
PlacementConstraint sConstraint = singleTransformer.transform();
AbstractConstraint sConstraintExpr = sConstraint.getConstraintExpr();
// Make sure the expression string is consistent
- Assert.assertEquals(single.toString(), cardinality.toString());
- Assert.assertEquals(cardinality.getScope(), single.getScope());
- Assert.assertEquals(cardinality.getMinCardinality(),
+ assertEquals(single.toString(), cardinality.toString());
+ assertEquals(cardinality.getScope(), single.getScope());
+ assertEquals(cardinality.getMinCardinality(),
single.getMinCardinality());
- Assert.assertEquals(cardinality.getMaxCardinality(),
+ assertEquals(cardinality.getMaxCardinality(),
single.getMaxCardinality());
new HashSet<>(Arrays.asList(PlacementTargets.allocationTag("hb"))),
single.getTargetExpressions());
- public void testTargetCardinalityConstraint() {
+ void testTargetCardinalityConstraint() {
targetCardinality(RACK, 3, 10, allocationTag("zk"));
- Assert.assertTrue(constraintExpr instanceof SingleConstraint);
+ assertTrue(constraintExpr instanceof SingleConstraint);
// Apply transformation. Should be a no-op.
@@ -131,19 +133,19 @@ public class TestPlacementConstraintTransformations {
PlacementConstraint newConstraint = specTransformer.transform();
// The constraint expression should be the same.
- Assert.assertEquals(constraintExpr, newConstraint.getConstraintExpr());
+ assertEquals(constraintExpr, newConstraint.getConstraintExpr());
- public void testCompositeConstraint() {
+ void testCompositeConstraint() {
for (AbstractConstraint child : orExpr.getChildren()) {
- Assert.assertTrue(child instanceof SingleConstraint);
+ assertTrue(child instanceof SingleConstraint);
// Apply transformation. Should transform target and cardinality constraints
@@ -154,19 +156,19 @@ public class TestPlacementConstraintTransformations {
Or specOrExpr = (Or) specConstraint.getConstraintExpr();
List<AbstractConstraint> specChildren = specOrExpr.getChildren();
- Assert.assertEquals(3, specChildren.size());
- Assert.assertTrue(specChildren.get(0) instanceof TargetConstraint);
- Assert.assertTrue(specChildren.get(1) instanceof SingleConstraint);
- Assert.assertTrue(specChildren.get(2) instanceof SingleConstraint);
+ assertEquals(3, specChildren.size());
+ assertTrue(specChildren.get(0) instanceof TargetConstraint);
+ assertTrue(specChildren.get(1) instanceof SingleConstraint);
+ assertTrue(specChildren.get(2) instanceof SingleConstraint);
SingleConstraintTransformer singleTransformer =
new SingleConstraintTransformer(specConstraint);
PlacementConstraint simConstraint = singleTransformer.transform();
- Assert.assertTrue(simConstraint.getConstraintExpr() instanceof Or);
+ assertTrue(simConstraint.getConstraintExpr() instanceof Or);
Or simOrExpr = (Or) specConstraint.getConstraintExpr();
for (AbstractConstraint child : simOrExpr.getChildren()) {
@@ -18,6 +18,12 @@
package org.apache.hadoop.yarn.client;
+import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;
@@ -29,20 +35,15 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Records;
-import java.security.PrivilegedExceptionAction;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
public class TestClientRMProxy {
- public void testGetRMDelegationTokenService() {
+ void testGetRMDelegationTokenService() {
String defaultRMAddress = YarnConfiguration.DEFAULT_RM_ADDRESS;
YarnConfiguration conf = new YarnConfiguration();
@@ -51,8 +52,8 @@ public class TestClientRMProxy {
String[] services = tokenService.toString().split(",");
assertEquals(1, services.length);
for (String service : services) {
- assertTrue("Incorrect token service name",
- service.contains(defaultRMAddress));
+ assertTrue(service.contains(defaultRMAddress),
+ "Incorrect token service name");
// HA is enabled
@@ -66,13 +67,13 @@ public class TestClientRMProxy {
services = tokenService.toString().split(",");
assertEquals(2, services.length);
- public void testGetAMRMTokenService() {
+ void testGetAMRMTokenService() {
String defaultRMAddress = YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS;
@@ -81,8 +82,8 @@ public class TestClientRMProxy {
@@ -96,8 +97,8 @@ public class TestClientRMProxy {
@@ -109,7 +110,7 @@ public class TestClientRMProxy {
* @throws Exception an Exception occurred
- public void testProxyUserCorrectUGI() throws Exception {
+ void testProxyUserCorrectUGI() throws Exception {
final YarnConfiguration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2");
@@ -129,7 +130,7 @@ public class TestClientRMProxy {
UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
UserGroupInformation proxyUser =
UserGroupInformation.createProxyUserForTesting("proxy", realUser,
- new String[] {"group1"});
+ new String[]{"group1"});
// Create the RMProxy using the proxyUser
ApplicationClientProtocol rmProxy = proxyUser.doAs(
@@ -163,7 +164,7 @@ public class TestClientRMProxy {
UGICapturingHadoopYarnProtoRPC.lastCurrentUser;
assertNotNull(lastCurrentUser);
assertEquals("proxy", lastCurrentUser.getShortUserName());
- Assert.assertEquals(UserGroupInformation.AuthenticationMethod.PROXY,
+ assertEquals(UserGroupInformation.AuthenticationMethod.PROXY,
lastCurrentUser.getAuthenticationMethod());
assertEquals(UserGroupInformation.getCurrentUser(),
lastCurrentUser.getRealUser());
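For readers unfamiliar with the proxy-user plumbing under test here: a real user wraps work in doAs so that code running inside sees the proxy identity with PROXY authentication. A minimal sketch of the pattern (class and method names hypothetical, no YARN wiring):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

class ProxyUserSketch {
  static String shortNameSeenInsideDoAs() throws Exception {
    UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
    UserGroupInformation proxyUser =
        UserGroupInformation.createProxyUserForTesting("proxy", realUser,
            new String[]{"group1"});
    // Code run inside doAs executes with the proxy identity, so
    // getCurrentUser() there reports "proxy", with realUser as its real user.
    return proxyUser.doAs((PrivilegedExceptionAction<String>) () ->
        UserGroupInformation.getCurrentUser().getShortUserName());
  }
}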
@@ -187,7 +188,7 @@ public class TestClientRMProxy {
currentUser = UserGroupInformation.getCurrentUser();
} catch (IOException ioe) {
- Assert.fail("Unable to get current user\n"
+ fail("Unable to get current user\n"
+ StringUtils.stringifyException(ioe));
lastCurrentUser = currentUser;
@@ -18,22 +18,19 @@
package org.apache.hadoop.yarn.client.api.impl;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.when;
-import static org.mockito.Mockito.times;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.ClientResponse;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.http.HttpConfig.Policy;
@@ -42,7 +39,6 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
-import static org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory.SSL_MONITORING_THREAD_NAME;
import org.apache.hadoop.test.TestGenericTestUtils;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
@@ -52,14 +48,22 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientHandlerException;
-import com.sun.jersey.api.client.ClientResponse;
+import static org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory.SSL_MONITORING_THREAD_NAME;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.when;
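The import moves in this hunk appear to follow the Hadoop checkstyle layout: java/javax first, then other third-party packages, then org.apache, with static imports grouped last. A hypothetical file header in that order:

// 1. java.* and javax.* imports
import java.io.IOException;
import java.util.List;

// 2. other third-party imports
import com.sun.jersey.api.client.ClientResponse;

// 3. org.apache.* imports
import org.apache.hadoop.conf.Configuration;

// 4. static imports, grouped at the end
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;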
public class TestTimelineClient {
@@ -68,7 +72,7 @@ public class TestTimelineClient {
private String keystoresDir;
private String sslConfDir;
public void setup() {
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
@@ -76,7 +80,7 @@ public class TestTimelineClient {
client = createTimelineClient(conf);
public void tearDown() throws Exception {
if (client != null) {
client.stop();
@@ -87,113 +91,113 @@ public class TestTimelineClient {
- public void testPostEntities() throws Exception {
+ void testPostEntities() throws Exception {
mockEntityClientResponse(spyTimelineWriter, ClientResponse.Status.OK,
- false, false);
+ false, false);
TimelinePutResponse response = client.putEntities(generateEntity());
- Assert.assertEquals(0, response.getErrors().size());
+ assertEquals(0, response.getErrors().size());
} catch (YarnException e) {
- Assert.fail("Exception is not expected");
+ fail("Exception is not expected");
- public void testPostEntitiesWithError() throws Exception {
+ void testPostEntitiesWithError() throws Exception {
mockEntityClientResponse(spyTimelineWriter, ClientResponse.Status.OK, true,
- false);
+ false);
- Assert.assertEquals(1, response.getErrors().size());
- Assert.assertEquals("test entity id", response.getErrors().get(0)
+ assertEquals(1, response.getErrors().size());
+ assertEquals("test entity id", response.getErrors().get(0)
.getEntityId());
- Assert.assertEquals("test entity type", response.getErrors().get(0)
+ assertEquals("test entity type", response.getErrors().get(0)
.getEntityType());
- Assert.assertEquals(TimelinePutResponse.TimelinePutError.IO_EXCEPTION,
+ assertEquals(TimelinePutResponse.TimelinePutError.IO_EXCEPTION,
response.getErrors().get(0).getErrorCode());
- public void testPostIncompleteEntities() throws Exception {
+ void testPostIncompleteEntities() throws Exception {
client.putEntities(new TimelineEntity());
- Assert.fail("Exception should have been thrown");
+ fail("Exception should have been thrown");
- public void testPostEntitiesNoResponse() throws Exception {
+ void testPostEntitiesNoResponse() throws Exception {
mockEntityClientResponse(spyTimelineWriter,
- ClientResponse.Status.INTERNAL_SERVER_ERROR, false, false);
+ ClientResponse.Status.INTERNAL_SERVER_ERROR, false, false);
client.putEntities(generateEntity());
- Assert.fail("Exception is expected");
+ fail("Exception is expected");
"Failed to get the response from the timeline server."));
- public void testPostEntitiesConnectionRefused() throws Exception {
+ void testPostEntitiesConnectionRefused() throws Exception {
mockEntityClientResponse(spyTimelineWriter, null, false, true);
- Assert.fail("RuntimeException is expected");
+ fail("RuntimeException is expected");
} catch (RuntimeException re) {
- Assert.assertTrue(re instanceof ClientHandlerException);
+ assertTrue(re instanceof ClientHandlerException);
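Where the project can assume JUnit 5.8+, assertInstanceOf is an alternative to assertTrue(x instanceof Y): it fails with a descriptive message and returns the value already cast. A sketch with hypothetical names:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;

class InstanceOfSketch {
  void demo() {
    RuntimeException re = new IllegalArgumentException("boom");
    // Fails with a descriptive message when the type does not match,
    // and returns the argument already cast to the expected type.
    IllegalArgumentException iae =
        assertInstanceOf(IllegalArgumentException.class, re);
    assertEquals("boom", iae.getMessage());
  }
}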
- public void testPutDomain() throws Exception {
+ void testPutDomain() throws Exception {
mockDomainClientResponse(spyTimelineWriter, ClientResponse.Status.OK, false);
client.putDomain(generateDomain());
- public void testPutDomainNoResponse() throws Exception {
+ void testPutDomainNoResponse() throws Exception {
mockDomainClientResponse(spyTimelineWriter,
ClientResponse.Status.FORBIDDEN, false);
- public void testPutDomainConnectionRefused() throws Exception {
+ void testPutDomainConnectionRefused() throws Exception {
mockDomainClientResponse(spyTimelineWriter, null, true);
- public void testCheckRetryCount() throws Exception {
+ void testCheckRetryCount() throws Exception {
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES,
- -2);
+ -2);
createTimelineClient(conf);
- } catch(IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES));
@@ -201,46 +205,46 @@ public class TestTimelineClient {
conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS,
- 0);
+ 0);
YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS));
int newMaxRetries = 5;
long newIntervalMs = 500;
- newMaxRetries);
+ newMaxRetries);
- newIntervalMs);
+ newIntervalMs);
TimelineClientImpl client = createTimelineClient(conf);
// This call should fail because there is no timeline server
- Assert.fail("Exception expected! "
+ fail("Exception expected! "
+ "Timeline server should be off to run this test. ");
} catch (RuntimeException ce) {
- "Handler exception for reason other than retry: " + ce.getMessage(),
- ce.getMessage().contains("Connection retries limit exceeded"));
+ ce.getMessage().contains("Connection retries limit exceeded"),
+ "Handler exception for reason other than retry: " + ce.getMessage());
// we would expect this exception here, check if the client has retried
- Assert.assertTrue("Retry filter didn't perform any retries! ",
- client.connector.connectionRetry.getRetired());
+ assertTrue(client.connector.connectionRetry.getRetired(),
+ "Retry filter didn't perform any retries! ");
- public void testDelegationTokenOperationsRetry() throws Exception {
+ void testDelegationTokenOperationsRetry() throws Exception {
// use kerberos to bypass the issue in HADOOP-11215
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
@@ -260,7 +264,7 @@ public class TestTimelineClient {
// try getting a delegation token
client.getDelegationToken(
- UserGroupInformation.getCurrentUser().getShortUserName());
+ UserGroupInformation.getCurrentUser().getShortUserName());
assertFail();
assertException(client, ce);
@@ -323,7 +327,7 @@ public class TestTimelineClient {
* @throws Exception
- public void testDelegationTokenDisabledOnSimpleAuth() throws Exception {
+ void testDelegationTokenDisabledOnSimpleAuth() throws Exception {
conf.set(YarnConfiguration.TIMELINE_HTTP_AUTH_TYPE, "simple");
@@ -336,15 +340,15 @@ public class TestTimelineClient {
Token<TimelineDelegationTokenIdentifier> identifierToken =
tClient.getDelegationToken(
// Get a null token when using simple auth
- Assert.assertNull(identifierToken);
+ assertNull(identifierToken);
// try renew a delegation token
Token<TimelineDelegationTokenIdentifier> dummyToken = new Token<>();
long renewTime = tClient.renewDelegationToken(dummyToken);
// Get invalid expiration time so that RM skips renewal
- Assert.assertEquals(renewTime, -1);
+ assertEquals(-1, renewTime);
// try cancel a delegation token
tClient.cancelDelegationToken(dummyToken);
@@ -356,17 +360,16 @@ public class TestTimelineClient {
private static void assertFail() {
+ "Timeline server should be off to run this test.");
private void assertException(TimelineClientImpl client, RuntimeException ce) {
- "Handler exception for reason other than retry: " + ce.toString(), ce
- .getMessage().contains("Connection retries limit exceeded"));
+ assertTrue(ce.getMessage().contains("Connection retries limit exceeded"),
+ "Handler exception for reason other than retry: " + ce.toString());
public static ClientResponse mockEntityClientResponse(
@@ -495,7 +498,7 @@ public class TestTimelineClient {
- public void testTimelineClientCleanup() throws Exception {
+ void testTimelineClientCleanup() throws Exception {
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 0);
@@ -520,7 +523,7 @@ public class TestTimelineClient {
reloaderThread = thread;
- Assert.assertTrue("Reloader is not alive", reloaderThread.isAlive());
+ assertTrue(reloaderThread.isAlive(), "Reloader is not alive");
client.close();
@@ -532,11 +535,11 @@ public class TestTimelineClient {
Thread.sleep(1000);
- Assert.assertFalse("Reloader is still alive", reloaderStillAlive);
+ assertFalse(reloaderStillAlive, "Reloader is still alive");
- public void testTimelineConnectorDestroy() {
+ void testTimelineConnectorDestroy() {
@@ -18,20 +18,18 @@
-import static org.mockito.Mockito.reset;
import org.apache.hadoop.fs.FileContext;
@@ -42,13 +40,16 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import static org.mockito.Mockito.reset;
public class TestTimelineClientForATS1_5 {
@@ -61,7 +62,7 @@ public class TestTimelineClientForATS1_5 {
private TimelineWriter spyTimelineWriter;
private UserGroupInformation authUgi;
localFS = FileContext.getLocalFSFileContext();
localActiveDir =
@@ -85,7 +86,7 @@ public class TestTimelineClientForATS1_5 {
return conf;
@@ -94,13 +95,13 @@ public class TestTimelineClientForATS1_5 {
client = createTimelineClient(getConfigurations());
verifyForPostEntities(false);
- public void testPostEntitiesToKeepUnderUserDir() throws Exception {
+ void testPostEntitiesToKeepUnderUserDir() throws Exception {
YarnConfiguration conf = getConfigurations();
conf.setBoolean(
YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_WITH_USER_DIR,
@@ -137,7 +138,7 @@ public class TestTimelineClientForATS1_5 {
TimelineEntity[] entityTDB = new TimelineEntity[1];
entityTDB[0] = entities[0];
verify(spyTimelineWriter, times(1)).putEntities(entityTDB);
- Assert.assertTrue(localFS.util().exists(
+ assertTrue(localFS.util().exists(
new Path(getAppAttemptDir(attemptId1, storeInsideUserDir),
"summarylog-"
+ attemptId1.toString())));
@@ -152,32 +153,32 @@ public class TestTimelineClientForATS1_5 {
client.putEntities(attemptId2, groupId2, entities);
verify(spyTimelineWriter, times(0)).putEntities(
any(TimelineEntity[].class));
new Path(getAppAttemptDir(attemptId2, storeInsideUserDir),
+ attemptId2.toString())));
"entitylog-"
+ groupId.toString())));
+ groupId2.toString())));
reset(spyTimelineWriter);
- Assert.fail("Exception is not expected. " + e);
+ fail("Exception is not expected. " + e);
- public void testPutDomain() {
+ void testPutDomain() {
verifyForPutDomain(false);
- public void testPutDomainToKeepUnderUserDir() {
+ void testPutDomainToKeepUnderUserDir() {
@@ -200,12 +201,12 @@ public class TestTimelineClientForATS1_5 {
client.putDomain(attemptId1, domain);
verify(spyTimelineWriter, times(0)).putDomain(domain);
- Assert.assertTrue(localFS.util()
+ assertTrue(localFS.util()
.exists(new Path(getAppAttemptDir(attemptId1, storeInsideUserDir),
"domainlog-" + attemptId1.toString())));
- Assert.fail("Exception is not expected." + e);
+ fail("Exception is not expected." + e);
@@ -18,20 +18,19 @@
-import static org.junit.Assert.assertNull;
import javax.ws.rs.core.MultivaluedMap;
+import org.junit.jupiter.api.TestInfo;
@@ -42,12 +41,13 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.junit.Rule;
-import org.junit.rules.TestName;
public class TestTimelineClientV2Impl {
private static final Logger LOG =
@@ -56,20 +56,25 @@ public class TestTimelineClientV2Impl {
private static final long TIME_TO_SLEEP = 150L;
private static final String EXCEPTION_MSG = "Exception in the content";
- public void setup() {
+ public void setup(TestInfo testInfo) {
conf = new YarnConfiguration();
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.setInt(YarnConfiguration.NUMBER_OF_ASYNC_ENTITIES_TO_MERGE, 3);
- if (!currTestName.getMethodName()
+ if (!testInfo.getDisplayName()
.contains("testRetryOnConnectionFailure")) {
- @Rule
- public TestName currTestName = new TestName();
+ void getTestInfo(TestInfo testInfo) {
+ System.out.println(testInfo.getDisplayName());
+ System.out.println(testInfo.getTestMethod());
+ System.out.println(testInfo.getTestClass());
+ System.out.println(testInfo.getTags());
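For context, TestInfo is injected by JUnit 5 into lifecycle and test methods, replacing the JUnit 4 TestName rule. A minimal sketch of the pattern used above (class name hypothetical):

import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

class TestInfoSketch {
  private String currentTestName;

  @BeforeEach
  void setup(TestInfo testInfo) {
    // For a plain @Test method the default display name is "methodName()",
    // so contains(...) checks against the method name keep working.
    currentTestName = testInfo.getDisplayName();
  }

  @Test
  void testSomething() {
    assertTrue(currentTestName.contains("testSomething"));
  }
}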
private YarnConfiguration conf;
private TestV2TimelineClient createTimelineClient(YarnConfiguration config) {
@@ -116,8 +121,8 @@ public class TestTimelineClientV2Impl {
private List<TimelineEntities> publishedEntities;
public TimelineEntities getPublishedEntities(int putIndex) {
- Assert.assertTrue("Not So many entities Published",
- putIndex < publishedEntities.size());
+ assertTrue(putIndex < publishedEntities.size(),
+ "Not So many entities Published");
return publishedEntities.get(putIndex);
@@ -152,7 +157,7 @@ public class TestTimelineClientV2Impl {
- public void testExceptionMultipleRetry() {
+ void testExceptionMultipleRetry() {
TestV2TimelineClientForExceptionHandling c =
new TestV2TimelineClientForExceptionHandling(
ApplicationId.newInstance(0, 0));
@@ -165,42 +170,42 @@ public class TestTimelineClientV2Impl {
c.putEntities(new TimelineEntity());
} catch (IOException e) {
- Assert.fail("YARN exception is expected");
+ fail("YARN exception is expected");
Throwable cause = e.getCause();
- Assert.assertTrue("IOException is expected",
- cause instanceof IOException);
- Assert.assertTrue("YARN exception is expected",
- cause.getMessage().contains(
- "TimelineClient has reached to max retry times : " + maxRetries));
+ assertTrue(cause instanceof IOException,
+ "IOException is expected");
+ assertTrue(cause.getMessage().contains(
+ "TimelineClient has reached to max retry times : " + maxRetries),
+ "YARN exception is expected");
c.setThrowYarnException(true);
- cause instanceof YarnException);
- cause.getMessage().contains(EXCEPTION_MSG));
+ assertTrue(cause instanceof YarnException,
+ assertTrue(cause.getMessage().contains(EXCEPTION_MSG),
c.stop();
client.putEntities(generateEntity("1"));
- public void testASyncCallMerge() throws Exception {
+ void testASyncCallMerge() throws Exception {
client.setSleepBeforeReturn(true);
client.putEntitiesAsync(generateEntity("1"));
@@ -209,7 +214,7 @@ public class TestTimelineClientV2Impl {
client.putEntitiesAsync(generateEntity("2"));
client.putEntitiesAsync(generateEntity("3"));
for (int i = 0; i < 4; i++) {
if (client.getNumOfTimelineEntitiesPublished() == 2) {
@@ -217,20 +222,24 @@ public class TestTimelineClientV2Impl {
Thread.sleep(TIME_TO_SLEEP);
- Assert.assertEquals("two merged TimelineEntities needs to be published", 2,
- client.getNumOfTimelineEntitiesPublished());
+ client.getNumOfTimelineEntitiesPublished(),
+ "two merged TimelineEntities needs to be published");
TimelineEntities secondPublishedEntities = client.getPublishedEntities(1);
- "Merged TimelineEntities Object needs to 2 TimelineEntity Object", 2,
- secondPublishedEntities.getEntities().size());
- Assert.assertEquals("Order of Async Events Needs to be FIFO", "2",
- secondPublishedEntities.getEntities().get(0).getId());
- Assert.assertEquals("Order of Async Events Needs to be FIFO", "3",
- secondPublishedEntities.getEntities().get(1).getId());
+ 2,
+ secondPublishedEntities.getEntities().size(),
+ "Merged TimelineEntities Object needs to 2 TimelineEntity Object");
+ assertEquals("2",
+ secondPublishedEntities.getEntities().get(0).getId(),
+ "Order of Async Events Needs to be FIFO");
+ assertEquals("3",
+ secondPublishedEntities.getEntities().get(1).getId(),
- public void testSyncCall() throws Exception {
+ void testSyncCall() throws Exception {
// sync entity should not be merged with async
@@ -239,7 +248,7 @@ public class TestTimelineClientV2Impl {
// except for the sync call above 2 should be merged
client.putEntities(generateEntity("4"));
if (client.getNumOfTimelineEntitiesPublished() == 3) {
@@ -253,57 +262,65 @@ public class TestTimelineClientV2Impl {
int lastPublishIndex = asyncPushesMerged ? 2 : 3;
TimelineEntities firstPublishedEntities = client.getPublishedEntities(0);
- Assert.assertEquals("sync entities should not be merged with async", 1,
- firstPublishedEntities.getEntities().size());
+ assertEquals(1,
+ firstPublishedEntities.getEntities().size(),
+ "sync entities should not be merged with async");
// async push does not guarantee a merge but is FIFO
if (asyncPushesMerged) {
- "async entities should be merged before publishing sync", 2,
+ "async entities should be merged before publishing sync");
TimelineEntities secondAsyncPublish = client.getPublishedEntities(1);
- secondAsyncPublish.getEntities().get(0).getId());
+ secondAsyncPublish.getEntities().get(0).getId(),
TimelineEntities thirdAsyncPublish = client.getPublishedEntities(2);
- thirdAsyncPublish.getEntities().get(0).getId());
+ thirdAsyncPublish.getEntities().get(0).getId(),
// test the last entity published is sync put
TimelineEntities thirdPublishedEntities =
client.getPublishedEntities(lastPublishIndex);
- Assert.assertEquals("sync entities had to be published at the last", 1,
- thirdPublishedEntities.getEntities().size());
- Assert.assertEquals("Expected last sync Event is not proper", "4",
- thirdPublishedEntities.getEntities().get(0).getId());
+ thirdPublishedEntities.getEntities().size(),
+ "sync entities had to be published at the last");
+ assertEquals("4",
+ thirdPublishedEntities.getEntities().get(0).getId(),
+ "Expected last sync Event is not proper");
- public void testExceptionCalls() throws Exception {
+ void testExceptionCalls() throws Exception {
client.setThrowYarnException(true);
- Assert.fail("Async calls are not expected to throw exception");
+ fail("Async calls are not expected to throw exception");
client.putEntities(generateEntity("2"));
- Assert.fail("Sync calls are expected to throw exception");
+ fail("Sync calls are expected to throw exception");
- Assert.assertEquals("Same exception needs to be thrown",
- "ActualException", e.getCause().getMessage());
+ assertEquals("ActualException", e.getCause().getMessage(),
+ "Same exception needs to be thrown");
- public void testConfigurableNumberOfMerges() throws Exception {
+ void testConfigurableNumberOfMerges() throws Exception {
// At max 3 entities need to be merged
@@ -318,52 +335,52 @@ public class TestTimelineClientV2Impl {
client.putEntitiesAsync(generateEntity("9"));
client.putEntitiesAsync(generateEntity("10"));
- Assert.fail("No exception expected");
+ fail("No exception expected");
// not having the same logic here as it doesn't depend on how many times
// events are published.
Thread.sleep(2 * TIME_TO_SLEEP);
printReceivedEntities();
for (TimelineEntities publishedEntities : client.publishedEntities) {
+ publishedEntities.getEntities().size() <= 3,
"Number of entities should not be greater than 3 for each publish,"
- + " but was " + publishedEntities.getEntities().size(),
- publishedEntities.getEntities().size() <= 3);
+ + " but was " + publishedEntities.getEntities().size());
- public void testSetTimelineToken() throws Exception {
+ void testSetTimelineToken() throws Exception {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
assertEquals(0, ugi.getTokens().size());
- assertNull("Timeline token in v2 client should not be set",
- client.currentTimelineToken);
+ assertNull(client.currentTimelineToken,
+ "Timeline token in v2 client should not be set");
Token token = Token.newInstance(
new byte[0], "kind", new byte[0], "service");
client.setTimelineCollectorInfo(CollectorInfo.newInstance(null, token));
- assertNull("Timeline token in v2 client should not be set as token kind " +
- "is unexepcted.", client.currentTimelineToken);
+ "Timeline token in v2 client should not be set as token kind " + "is unexepcted.");
token = Token.newInstance(new byte[0], TimelineDelegationTokenIdentifier.
KIND_NAME.toString(), new byte[0], null);
- assertNull("Timeline token in v2 client should not be set as serice is " +
- "not set.", client.currentTimelineToken);
+ "Timeline token in v2 client should not be set as serice is " + "not set.");
TimelineDelegationTokenIdentifier ident =
new TimelineDelegationTokenIdentifier(new Text(ugi.getUserName()),
- new Text("renewer"), null);
+ new Text("renewer"), null);
ident.setSequenceNumber(1);
token = Token.newInstance(ident.getBytes(),
TimelineDelegationTokenIdentifier.KIND_NAME.toString(), new byte[0],
"localhost:1234");
assertEquals(1, ugi.getTokens().size());
- assertNotNull("Timeline token should be set in v2 client.",
+ assertNotNull(client.currentTimelineToken,
+ "Timeline token should be set in v2 client.");
assertEquals(token, client.currentTimelineToken);
ident.setSequenceNumber(20);
@@ -377,7 +394,7 @@ public class TestTimelineClientV2Impl {
- public void testAfterStop() throws Exception {
+ void testAfterStop() throws Exception {
@@ -388,12 +405,12 @@ public class TestTimelineClientV2Impl {
client.putEntitiesAsync(generateEntity("50"));
- Assert.fail("Exception expected");
+ fail("Exception expected");
// expected
@@ -411,7 +428,7 @@ public class TestTimelineClientV2Impl {
client.publishedEntities.get(client.publishedEntities.size() - 1);
TimelineEntity timelineEntity = publishedEntities.getEntities()
.get(publishedEntities.getEntities().size() - 1);
- Assert.assertEquals("", "19", timelineEntity.getId());
+ assertEquals("19", timelineEntity.getId(), "");
private void printReceivedEntities() {
@@ -435,7 +452,7 @@ public class TestTimelineClientV2Impl {
return entity;
public void tearDown() {
@@ -18,30 +18,31 @@
-import static org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType.YARN_APPLICATION_ATTEMPT;
-import static org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType.YARN_CONTAINER;
+import java.net.URI;
+import javax.ws.rs.core.MultivaluedMap;
+import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import com.sun.jersey.api.client.ClientResponse;
-import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.client.api.TimelineReaderClient;
-import javax.ws.rs.core.MultivaluedMap;
-import java.net.URI;
+import static org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType.YARN_APPLICATION_ATTEMPT;
+import static org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType.YARN_CONTAINER;
* Test class for Timeline Reader Client.
@@ -52,7 +53,7 @@ public class TestTimelineReaderClientImpl {
"\"id\":\"appattempt_1234_0001_000001\"}";
private TimelineReaderClient client;
client = new MockTimelineReaderClient();
@@ -63,64 +64,64 @@ public class TestTimelineReaderClientImpl {
- public void testGetApplication() throws Exception {
+ void testGetApplication() throws Exception {
ApplicationId applicationId =
ApplicationId.fromString("application_1234_0001");
TimelineEntity entity = client.getApplicationEntity(applicationId,
null, null);
- Assert.assertEquals("mockApp1", entity.getId());
+ assertEquals("mockApp1", entity.getId());
- public void getApplicationAttemptEntity() throws Exception {
+ void getApplicationAttemptEntity() throws Exception {
ApplicationAttemptId attemptId =
ApplicationAttemptId.fromString("appattempt_1234_0001_000001");
TimelineEntity entity = client.getApplicationAttemptEntity(attemptId,
- Assert.assertEquals("mockAppAttempt1", entity.getId());
+ assertEquals("mockAppAttempt1", entity.getId());
- public void getApplicationAttemptEntities() throws Exception {
+ void getApplicationAttemptEntities() throws Exception {
List<TimelineEntity> entities =
client.getApplicationAttemptEntities(applicationId, null,
null, 0, null);
- Assert.assertEquals(2, entities.size());
- Assert.assertEquals("mockAppAttempt2", entities.get(1).getId());
+ assertEquals(2, entities.size());
+ assertEquals("mockAppAttempt2", entities.get(1).getId());
- public void testGetContainer() throws Exception {
+ void testGetContainer() throws Exception {
ContainerId containerId =
ContainerId.fromString("container_1234_0001_01_000001");
TimelineEntity entity = client.getContainerEntity(containerId,
- Assert.assertEquals("mockContainer1", entity.getId());
+ assertEquals("mockContainer1", entity.getId());
- public void testGetContainers() throws Exception {
+ void testGetContainers() throws Exception {
ApplicationId appId =
List<TimelineEntity> entities = client.getContainerEntities(appId,
null, null, 0, null);
- Assert.assertEquals("mockContainer2", entities.get(1).getId());
+ assertEquals("mockContainer2", entities.get(1).getId());
- public void testGetContainersForAppAttempt() throws Exception {
+ void testGetContainersForAppAttempt() throws Exception {
null, ImmutableMap.of("infofilters", appAttemptInfoFilter), 0, null);
- Assert.assertEquals("mockContainer4", entities.get(1).getId());
+ assertEquals("mockContainer4", entities.get(1).getId());
@@ -154,7 +155,7 @@ public class TestTimelineReaderClientImpl {
when(mockClientResponse.getEntity(TimelineEntity[].class)).thenReturn(
createTimelineEntities("mockContainer1", "mockContainer2"));
} else if (path.contains(YARN_CONTAINER.toString()) && params.containsKey("infofilters")) {
- Assert.assertEquals(encodeValue(appAttemptInfoFilter), params.get("infofilters").get(0));
+ assertEquals(encodeValue(appAttemptInfoFilter), params.get("infofilters").get(0));
createTimelineEntities("mockContainer3", "mockContainer4"));
} else if (path.contains(YARN_APPLICATION_ATTEMPT.toString())) {
@@ -18,20 +18,20 @@
package org.apache.hadoop.yarn.conf;
-import org.apache.hadoop.conf.Configuration;
+import java.util.Collection;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
-import java.util.Collection;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
public class TestHAUtil {
private Configuration conf;
@@ -48,7 +48,7 @@ public class TestHAUtil {
private static final String RM_NODE_IDS_UNTRIMMED = RM1_NODE_ID_UNTRIMMED + "," + RM2_NODE_ID;
private static final String RM_NODE_IDS = RM1_NODE_ID + "," + RM2_NODE_ID;
public void setUp() {
conf.set(YarnConfiguration.RM_HA_IDS, RM_NODE_IDS_UNTRIMMED);
@@ -62,7 +62,7 @@ public class TestHAUtil {
- public void testGetRMServiceId() throws Exception {
+ void testGetRMServiceId() throws Exception {
conf.set(YarnConfiguration.RM_HA_IDS, RM1_NODE_ID + "," + RM2_NODE_ID);
Collection<String> rmhaIds = HAUtil.getRMHAIds(conf);
assertEquals(2, rmhaIds.size());
@@ -73,18 +73,18 @@ public class TestHAUtil {
- public void testGetRMId() throws Exception {
+ void testGetRMId() throws Exception {
conf.set(YarnConfiguration.RM_HA_ID, RM1_NODE_ID);
- assertEquals("Does not honor " + YarnConfiguration.RM_HA_ID,
- RM1_NODE_ID, HAUtil.getRMHAId(conf));
+ assertEquals(RM1_NODE_ID, HAUtil.getRMHAId(conf),
+ "Does not honor " + YarnConfiguration.RM_HA_ID);
conf.clear();
- assertNull("Return null when " + YarnConfiguration.RM_HA_ID
- + " is not set", HAUtil.getRMHAId(conf));
+ assertNull(HAUtil.getRMHAId(conf), "Return null when " + YarnConfiguration.RM_HA_ID
+ + " is not set");
- public void testVerifyAndSetConfiguration() throws Exception {
+ void testVerifyAndSetConfiguration() throws Exception {
Configuration myConf = new Configuration(conf);
@@ -93,14 +93,12 @@ public class TestHAUtil {
fail("Should not throw any exceptions.");
- assertEquals("Should be saved as Trimmed collection",
- StringUtils.getStringCollection(RM_NODE_IDS),
- HAUtil.getRMHAIds(myConf));
- assertEquals("Should be saved as Trimmed string",
- RM1_NODE_ID, HAUtil.getRMHAId(myConf));
+ assertEquals(StringUtils.getStringCollection(RM_NODE_IDS),
+ HAUtil.getRMHAIds(myConf),
+ "Should be saved as Trimmed collection");
+ assertEquals(RM1_NODE_ID, HAUtil.getRMHAId(myConf), "Should be saved as Trimmed string");
for (String confKey : YarnConfiguration.getServiceAddressConfKeys(myConf)) {
- assertEquals("RPC address not set for " + confKey,
- RM1_ADDRESS, myConf.get(confKey));
+ assertEquals(RM1_ADDRESS, myConf.get(confKey), "RPC address not set for " + confKey);
myConf = new Configuration(conf);
@@ -108,12 +106,12 @@ public class TestHAUtil {
HAUtil.verifyAndSetConfiguration(myConf);
- assertEquals("YarnRuntimeException by verifyAndSetRMHAIds()",
- HAUtil.BAD_CONFIG_MESSAGE_PREFIX +
+ assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX +
HAUtil.getInvalidValueMessage(YarnConfiguration.RM_HA_IDS,
myConf.get(YarnConfiguration.RM_HA_IDS) +
- "\nHA mode requires atleast two RMs"),
- e.getMessage());
+ "\nHA mode requires atleast two RMs"),
+ e.getMessage(),
+ "YarnRuntimeException by verifyAndSetRMHAIds()");
@@ -127,10 +125,10 @@ public class TestHAUtil {
- assertEquals("YarnRuntimeException by getRMId()",
HAUtil.getNeedToSetValueMessage(YarnConfiguration.RM_HA_ID),
+ "YarnRuntimeException by getRMId()");
@@ -144,11 +142,11 @@ public class TestHAUtil {
- assertEquals("YarnRuntimeException by addSuffix()",
HAUtil.getInvalidValueMessage(YarnConfiguration.RM_HA_ID,
- RM_INVALID_NODE_ID),
+ RM_INVALID_NODE_ID),
+ "YarnRuntimeException by addSuffix()");
myConf = new Configuration();
@@ -160,11 +158,10 @@ public class TestHAUtil {
fail("Should throw YarnRuntimeException. by Configuration#set()");
String confKey =
- HAUtil.addSuffix(YarnConfiguration.RM_ADDRESS, RM1_NODE_ID);
- assertEquals("YarnRuntimeException by Configuration#set()",
- HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getNeedToSetValueMessage(
- HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, RM1_NODE_ID)
- + " or " + confKey), e.getMessage());
+ HAUtil.addSuffix(YarnConfiguration.RM_ADDRESS, RM1_NODE_ID);
+ assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getNeedToSetValueMessage(
+ HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, RM1_NODE_ID)
+ + " or " + confKey), e.getMessage(), "YarnRuntimeException by Configuration#set()");
// simulate the case YarnConfiguration.RM_HA_IDS doesn't contain
@@ -180,10 +177,10 @@ public class TestHAUtil {
- assertEquals("YarnRuntimeException by getRMId()'s validation",
- HAUtil.getRMHAIdNeedToBeIncludedMessage("[rm2, rm3]", RM1_NODE_ID),
+ HAUtil.getRMHAIdNeedToBeIncludedMessage("[rm2, rm3]", RM1_NODE_ID),
+ "YarnRuntimeException by getRMId()'s validation");
// simulate the case that no leader election is enabled
@@ -196,19 +193,19 @@ public class TestHAUtil {
- HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.NO_LEADER_ELECTION_MESSAGE,
+ assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.NO_LEADER_ELECTION_MESSAGE,
- public void testGetConfKeyForRMInstance() {
- assertTrue("RM instance id is not suffixed",
- HAUtil.getConfKeyForRMInstance(YarnConfiguration.RM_ADDRESS, conf)
- .contains(HAUtil.getRMHAId(conf)));
- assertFalse("RM instance id is suffixed",
- HAUtil.getConfKeyForRMInstance(YarnConfiguration.NM_ADDRESS, conf)
+ void testGetConfKeyForRMInstance() {
+ assertTrue(HAUtil.getConfKeyForRMInstance(YarnConfiguration.RM_ADDRESS, conf)
+ .contains(HAUtil.getRMHAId(conf)),
+ "RM instance id is not suffixed");
+ assertFalse(HAUtil.getConfKeyForRMInstance(YarnConfiguration.NM_ADDRESS, conf)
+ "RM instance id is suffixed");
@@ -18,28 +18,29 @@
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
public class TestYarnConfiguration {
- public void testDefaultRMWebUrl() throws Exception {
+ void testDefaultRMWebUrl() throws Exception {
String rmWebUrl = WebAppUtils.getRMWebAppURLWithScheme(conf);
// shouldn't have a "/" on the end of the url as all the other uri routinnes
// specifically add slashes and Jetty doesn't handle double slashes.
- Assert.assertNotSame("RM Web Url is not correct", "http://0.0.0.0:8088",
- rmWebUrl);
+ assertNotSame("http://0.0.0.0:8088",
+ rmWebUrl,
+ "RM Web Url is not correct");
// test it in HA scenario
@@ -47,7 +48,7 @@ public class TestYarnConfiguration {
conf.set("yarn.resourcemanager.webapp.address.rm1", "10.10.10.10:18088");
conf.set("yarn.resourcemanager.webapp.address.rm2", "20.20.20.20:28088");
String rmWebUrlinHA = WebAppUtils.getRMWebAppURLWithScheme(conf);
- Assert.assertEquals("http://10.10.10.10:18088", rmWebUrlinHA);
+ assertEquals("http://10.10.10.10:18088", rmWebUrlinHA);
YarnConfiguration conf2 = new YarnConfiguration();
conf2.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
@@ -55,17 +56,17 @@ public class TestYarnConfiguration {
conf2.set("yarn.resourcemanager.hostname.rm1", "30.30.30.30");
conf2.set("yarn.resourcemanager.hostname.rm2", "40.40.40.40");
String rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2);
- Assert.assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2);
+ assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2);
rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2, 0);
rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2, 1);
- Assert.assertEquals("http://40.40.40.40:8088", rmWebUrlinHA2);
+ assertEquals("http://40.40.40.40:8088", rmWebUrlinHA2);
- public void testRMWebUrlSpecified() throws Exception {
+ void testRMWebUrlSpecified() throws Exception {
// seems a bit odd but right now we are forcing webapp for RM to be
// RM_ADDRESS
@@ -74,15 +75,15 @@ public class TestYarnConfiguration {
conf.set(YarnConfiguration.RM_ADDRESS, "rmtesting:9999");
String[] parts = rmWebUrl.split(":");
- Assert.assertEquals("RM Web URL Port is incrrect", 24543,
- Integer.parseInt(parts[parts.length - 1]));
- Assert.assertNotSame(
- "RM Web Url not resolved correctly. Should not be rmtesting",
- "http://rmtesting:24543", rmWebUrl);
+ assertEquals(24543,
+ Integer.parseInt(parts[parts.length - 1]),
+ "RM Web URL Port is incrrect");
+ assertNotSame("http://rmtesting:24543", rmWebUrl,
+ "RM Web Url not resolved correctly. Should not be rmtesting");
- public void testGetSocketAddressForNMWithHA() {
+ void testGetSocketAddressForNMWithHA() {
// Set NM address
@@ -100,7 +101,7 @@ public class TestYarnConfiguration {
- public void testGetSocketAddr() throws Exception {
+ void testGetSocketAddr() throws Exception {
YarnConfiguration conf;
InetSocketAddress resourceTrackerAddress;
@@ -113,9 +114,9 @@ public class TestYarnConfiguration {
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
assertEquals(
- new InetSocketAddress(
- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[0],
- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
+ new InetSocketAddress(
+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS.split(":")[0],
+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT),
resourceTrackerAddress);
//with address
@@ -126,9 +127,9 @@ public class TestYarnConfiguration {
- "10.0.0.1",
+ "10.0.0.1",
//address and socket
@@ -139,9 +140,9 @@ public class TestYarnConfiguration {
- "10.0.0.2",
- 5001),
+ "10.0.0.2",
+ 5001),
//bind host only
@@ -153,9 +154,9 @@ public class TestYarnConfiguration {
- "10.0.0.3",
+ "10.0.0.3",
//bind host and address no port
@@ -167,9 +168,9 @@ public class TestYarnConfiguration {
- "0.0.0.0",
+ "0.0.0.0",
//bind host and address with port
@@ -181,15 +182,15 @@ public class TestYarnConfiguration {
- 5003),
+ 5003),
- public void testUpdateConnectAddr() throws Exception {
+ void testUpdateConnectAddr() throws Exception {
InetSocketAddress resourceTrackerConnectAddress;
InetSocketAddress serverAddress;
@@ -20,9 +20,6 @@ package org.apache.hadoop.yarn.event;
-import org.apache.hadoop.yarn.event.AsyncDispatcher;
-import org.apache.hadoop.yarn.event.Event;
-import org.apache.hadoop.yarn.event.EventHandler;
@SuppressWarnings({"unchecked", "rawtypes"})
public class InlineDispatcher extends AsyncDispatcher {
@@ -27,23 +27,30 @@ import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
+import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.impl.MetricsCollectorImpl;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.yarn.metrics.GenericEventTypeMetrics;
+import org.apache.hadoop.yarn.metrics.GenericEventTypeMetrics;
import static org.apache.hadoop.metrics2.lib.Interns.info;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.atLeastOnce;
public class TestAsyncDispatcher {
@@ -52,9 +59,10 @@ public class TestAsyncDispatcher {
* 1. A thread which was putting event to event queue is interrupted.
* 2. Event queue is empty on close.
- @SuppressWarnings({ "unchecked", "rawtypes" })
- @Test(timeout=10000)
- public void testDispatcherOnCloseIfQueueEmpty() throws Exception {
+ @SuppressWarnings({"unchecked", "rawtypes"})
+ @Timeout(10000)
+ void testDispatcherOnCloseIfQueueEmpty() throws Exception {
BlockingQueue<Event> eventQueue = spy(new LinkedBlockingQueue<Event>());
Event event = mock(Event.class);
doThrow(new InterruptedException()).when(eventQueue).put(event);
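One caveat worth knowing when reading the @Timeout conversions: JUnit 4's @Test(timeout = ...) is in milliseconds, while JUnit 5's @Timeout defaults to seconds, so carrying the literal over changes the limit unless a unit is given. A minimal sketch of the two explicit forms (class and method names hypothetical):

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutSketch {
  @Test
  @Timeout(10) // default unit is SECONDS: 10 s, i.e. the old timeout=10000 ms
  void portedWithSeconds() throws InterruptedException {
    Thread.sleep(100); // finishes well within the limit
  }

  @Test
  @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS) // same limit, explicit unit
  void portedWithExplicitUnit() throws InterruptedException {
    Thread.sleep(100);
  }
}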
@@ -66,19 +74,20 @@ public class TestAsyncDispatcher {
disp.waitForEventThreadToWait();
disp.getEventHandler().handle(event);
- Assert.fail("Expected YarnRuntimeException");
+ fail("Expected YarnRuntimeException");
- Assert.assertTrue(e.getCause() instanceof InterruptedException);
+ assertTrue(e.getCause() instanceof InterruptedException);
// Queue should be empty and dispatcher should not hang on close
- Assert.assertTrue("Event Queue should have been empty",
- eventQueue.isEmpty());
+ assertTrue(eventQueue.isEmpty(),
+ "Event Queue should have been empty");
disp.close();
// Test dispatcher should timeout on draining events.
- public void testDispatchStopOnTimeout() throws Exception {
+ void testDispatchStopOnTimeout() throws Exception {
BlockingQueue<Event> eventQueue = new LinkedBlockingQueue<Event>();
eventQueue = spy(eventQueue);
// simulate dispatcher is not drained.
@@ -143,9 +152,10 @@ public class TestAsyncDispatcher {
// Test if drain dispatcher drains events on stop.
- @SuppressWarnings({ "rawtypes" })
- public void testDrainDispatcherDrainEventsOnStop() throws Exception {
+ @SuppressWarnings({"rawtypes"})
+ void testDrainDispatcherDrainEventsOnStop() throws Exception {
conf.setInt(YarnConfiguration.DISPATCHER_DRAIN_EVENTS_TIMEOUT, 2000);
BlockingQueue<Event> queue = new LinkedBlockingQueue<Event>();
@@ -161,11 +171,12 @@ public class TestAsyncDispatcher {
//Test print dispatcher details when the blocking queue is heavy
- @Test(timeout = 10000)
- public void testPrintDispatcherEventDetails() throws Exception {
+ void testPrintDispatcherEventDetails() throws Exception {
conf.setInt(YarnConfiguration.
- YARN_DISPATCHER_PRINT_EVENTS_INFO_THRESHOLD, 5000);
+ YARN_DISPATCHER_PRINT_EVENTS_INFO_THRESHOLD, 5000);
Logger log = mock(Logger.class);
AsyncDispatcher dispatcher = new AsyncDispatcher();
dispatcher.init(conf);
@@ -190,7 +201,7 @@ public class TestAsyncDispatcher {
Thread.sleep(2000);
//Make sure more than one event to take
verify(log, atLeastOnce()).
- info("Latest dispatch event type: TestEventType");
+ info("Latest dispatch event type: TestEventType");
//... restore logger object
logger.set(null, oldLog);
@@ -199,8 +210,9 @@ public class TestAsyncDispatcher {
- @Test(timeout = 60000)
- public void testPrintDispatcherEventDetailsAvoidDeadLoop() throws Exception {
+ @Timeout(60000)
+ void testPrintDispatcherEventDetailsAvoidDeadLoop() throws Exception {
for (int i = 0; i < 5; i++) {
testPrintDispatcherEventDetailsAvoidDeadLoopInternal();
@@ -241,7 +253,7 @@ public class TestAsyncDispatcher {
- public void testMetricsForDispatcher() throws Exception {
+ void testMetricsForDispatcher() throws Exception {
AsyncDispatcher dispatcher = null;
@@ -252,7 +264,7 @@ public class TestAsyncDispatcher {
new GenericEventTypeMetrics.EventTypeMetricsBuilder()
.setMs(DefaultMetricsSystem.instance())
.setInfo(info("GenericEventTypeMetrics for "
- + TestEnum.class.getName(),
+ + TestEnum.class.getName(),
"Metrics for " + dispatcher.getName()))
.setEnumClass(TestEnum.class)
.setEnums(TestEnum.class.getEnumConstants())
@@ -287,34 +299,34 @@ public class TestAsyncDispatcher {
get(TestEnum.TestEventType2) == 2, 1000, 10000);
// Check time spend.
- Assert.assertTrue(genericEventTypeMetrics.
+ assertTrue(genericEventTypeMetrics.
getTotalProcessingTime(TestEnum.TestEventType)
- >= 1500*3);
+ >= 1500 * 3);
- < 1500*4);
+ < 1500 * 4);
getTotalProcessingTime(TestEnum.TestEventType2)
- >= 1500*2);
+ >= 1500 * 2);
- < 1500*3);
+ < 1500 * 3);
// Make sure metrics consistent.
- Assert.assertEquals(Long.toString(genericEventTypeMetrics.
+ assertEquals(Long.toString(genericEventTypeMetrics.
get(TestEnum.TestEventType)),
genericEventTypeMetrics.
getRegistry().get("TestEventType_event_count").toString());
get(TestEnum.TestEventType2)),
getRegistry().get("TestEventType2_event_count").toString());
getTotalProcessingTime(TestEnum.TestEventType)),
getRegistry().get("TestEventType_processing_time").toString());
getTotalProcessingTime(TestEnum.TestEventType2)),
getRegistry().get("TestEventType2_processing_time").toString());
@@ -326,7 +338,7 @@ public class TestAsyncDispatcher {
- public void testDispatcherMetricsHistogram() throws Exception {
+ void testDispatcherMetricsHistogram() throws Exception {
@@ -337,7 +349,7 @@ public class TestAsyncDispatcher {
@@ -393,14 +405,13 @@ public class TestAsyncDispatcher {
String metricName = metric.name();
if (expectedValues.containsKey(metricName)) {
Long expectedValue = expectedValues.get(metricName);
- "Metric " + metricName + " doesn't have expected value",
- expectedValue, metric.value());
+ assertEquals(expectedValue, metric.value(),
+ "Metric " + metricName + " doesn't have expected value");
testResults.add(metricName);
- Assert.assertEquals(expectedValues.keySet(), testResults);
+ assertEquals(expectedValues.keySet(), testResults);
dispatcher.close();
@@ -18,11 +18,12 @@
package org.apache.hadoop.yarn.factories.impl.pb;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
@@ -33,7 +34,7 @@ import static org.mockito.Mockito.verify;
public class TestRpcClientFactoryPBImpl {
- public void testToUseCustomClassloader() throws Exception {
+ void testToUseCustomClassloader() throws Exception {
Configuration configuration = mock(Configuration.class);
RpcClientFactoryPBImpl rpcClientFactoryPB = RpcClientFactoryPBImpl.get();
public class TestRpcServerFactoryPBImpl {
RpcServerFactoryPBImpl rpcServerFactoryPB = RpcServerFactoryPBImpl.get();
@@ -21,71 +21,71 @@ package org.apache.hadoop.yarn.ipc;
import java.io.FileNotFoundException;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
public class TestRPCUtil {
- public void testUnknownExceptionUnwrapping() {
+ void testUnknownExceptionUnwrapping() {
Class<? extends Throwable> exception = YarnException.class;
String className = "UnknownException.class";
verifyRemoteExceptionUnwrapping(exception, className);
- public void testRemoteIOExceptionUnwrapping() {
+ void testRemoteIOExceptionUnwrapping() {
Class<? extends Throwable> exception = IOException.class;
verifyRemoteExceptionUnwrapping(exception, exception.getName());
- public void testRemoteIOExceptionDerivativeUnwrapping() {
+ void testRemoteIOExceptionDerivativeUnwrapping() {
// Test IOException sub-class
Class<? extends Throwable> exception = FileNotFoundException.class;
- public void testRemoteYarnExceptionUnwrapping() {
+ void testRemoteYarnExceptionUnwrapping() {
- public void testRemoteYarnExceptionDerivativeUnwrapping() {
+ void testRemoteYarnExceptionDerivativeUnwrapping() {
Class<? extends Throwable> exception = YarnTestException.class;
- public void testRemoteRuntimeExceptionUnwrapping() {
+ void testRemoteRuntimeExceptionUnwrapping() {
Class<? extends Throwable> exception = NullPointerException.class;
- public void testUnexpectedRemoteExceptionUnwrapping() {
+ void testUnexpectedRemoteExceptionUnwrapping() {
// Non IOException, YarnException thrown by the remote side.
Class<? extends Throwable> exception = Exception.class;
verifyRemoteExceptionUnwrapping(RemoteException.class, exception.getName());
- public void testRemoteYarnExceptionWithoutStringConstructor() {
+ void testRemoteYarnExceptionWithoutStringConstructor() {
// Derivatives of YarnException should always define a string constructor.
Class<? extends Throwable> exception = YarnTestExceptionNoConstructor.class;
- public void testRPCServiceExceptionUnwrapping() {
+ void testRPCServiceExceptionUnwrapping() {
String message = "ServiceExceptionMessage";
ServiceException se = new ServiceException(message);
@@ -96,12 +96,12 @@ public class TestRPCUtil {
t = thrown;
- Assert.assertTrue(IOException.class.isInstance(t));
- Assert.assertTrue(t.getMessage().contains(message));
+ assertTrue(IOException.class.isInstance(t));
+ assertTrue(t.getMessage().contains(message));
- public void testRPCIOExceptionUnwrapping() {
+ void testRPCIOExceptionUnwrapping() {
String message = "DirectIOExceptionMessage";
IOException ioException = new FileNotFoundException(message);
ServiceException se = new ServiceException(ioException);
@@ -112,12 +112,12 @@ public class TestRPCUtil {
} catch (Throwable thrown) {
- Assert.assertTrue(FileNotFoundException.class.isInstance(t));
+ assertTrue(FileNotFoundException.class.isInstance(t));
- public void testRPCRuntimeExceptionUnwrapping() {
+ void testRPCRuntimeExceptionUnwrapping() {
String message = "RPCRuntimeExceptionUnwrapping";
RuntimeException re = new NullPointerException(message);
ServiceException se = new ServiceException(re);
@@ -129,8 +129,8 @@ public class TestRPCUtil {
- Assert.assertTrue(NullPointerException.class.isInstance(t));
+ assertTrue(NullPointerException.class.isInstance(t));
private void verifyRemoteExceptionUnwrapping(
@@ -147,11 +147,10 @@ public class TestRPCUtil {
- Assert.assertTrue("Expected exception [" + expectedLocalException
- + "] but found " + t, expectedLocalException.isInstance(t));
- "Expected message [" + message + "] but found " + t.getMessage(), t
- .getMessage().contains(message));
+ assertTrue(expectedLocalException.isInstance(t), "Expected exception [" + expectedLocalException
+ + "] but found " + t);
+ assertTrue(t.getMessage().contains(message),
+ "Expected message [" + message + "] but found " + t.getMessage());
private static class YarnTestException extends YarnException {
@@ -18,13 +18,13 @@
package org.apache.hadoop.yarn.logaggregation;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileController;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOG_AGGREGATION_FILE_CONTROLLER_FMT;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_SUFFIX_FMT;
@@ -18,6 +18,15 @@
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.fs.FileSystem;
@@ -32,14 +41,6 @@ import org.apache.hadoop.yarn.logaggregation.testutils.LogAggregationTestcase;
import org.apache.hadoop.yarn.logaggregation.testutils.LogAggregationTestcaseBuilder;
import org.apache.hadoop.yarn.logaggregation.testutils.LogAggregationTestcaseBuilder.AppDescriptor;
import org.apache.log4j.Level;
import static org.apache.hadoop.yarn.logaggregation.LogAggregationTestUtils.enableFileControllers;
@@ -64,12 +65,12 @@ public class TestAggregatedLogDeletionService {
LogAggregationTFileController.class);
public static final List<String> ALL_FILE_CONTROLLER_NAMES = Arrays.asList(I_FILE, T_FILE);
public static void beforeClass() {
org.apache.log4j.Logger.getRootLogger().setLevel(Level.DEBUG);
public void closeFilesystems() throws IOException {
// prevent the same mockfs instance from being reused due to FS cache
FileSystem.closeAll();
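The FS-cache comment above deserves a word: Hadoop caches FileSystem instances keyed by scheme, authority and UGI, so a mock filesystem registered under a test scheme can leak into later tests. A small sketch of the two usual countermeasures; the "mockfs" scheme is hypothetical:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

final class FsCacheSketch {
  private FsCacheSketch() {
  }

  static void avoidCachedMock() throws IOException {
    // Option 1: drop every cached FileSystem instance, as the cleanup above does.
    FileSystem.closeAll();
    // Option 2: bypass the cache for one scheme via fs.<scheme>.impl.disable.cache.
    Configuration conf = new Configuration();
    conf.setBoolean("fs.mockfs.impl.disable.cache", true);
  }
}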
@@ -91,7 +92,7 @@ public class TestAggregatedLogDeletionService {
- public void testDeletion() throws Exception {
+ void testDeletion() throws Exception {
long now = System.currentTimeMillis();
long toDeleteTime = now - (2000 * 1000);
long toKeepTime = now - (1500 * 1000);
@@ -99,36 +100,36 @@ public class TestAggregatedLogDeletionService {
Configuration conf = setupConfiguration(1800, -1);
long timeout = 2000L;
LogAggregationTestcaseBuilder.create(conf)
- .withRootPath(ROOT)
- .withRemoteRootLogPath(REMOTE_ROOT_LOG_DIR)
- .withUserDir(USER_ME, toKeepTime)
- .withSuffixDir(SUFFIX, toDeleteTime)
- .withBucketDir(toDeleteTime)
- .withApps(Lists.newArrayList(
- new AppDescriptor(toDeleteTime, Lists.newArrayList()),
- new AppDescriptor(toDeleteTime, Lists.newArrayList(
- Pair.of(DIR_HOST1, toDeleteTime),
- Pair.of(DIR_HOST2, toKeepTime))),
- Pair.of(DIR_HOST2, toDeleteTime))),
- Pair.of(DIR_HOST2, toKeepTime)))))
- .withFinishedApps(1, 2, 3)
- .withRunningApps(4)
- .injectExceptionForAppDirDeletion(3)
- .build()
- .startDeletionService()
- .verifyAppDirsDeleted(timeout, 1, 3)
- .verifyAppDirsNotDeleted(timeout, 2, 4)
- .verifyAppFileDeleted(4, 1, timeout)
- .verifyAppFileNotDeleted(4, 2, timeout)
- .teardown(1);
+ .withRootPath(ROOT)
+ .withRemoteRootLogPath(REMOTE_ROOT_LOG_DIR)
+ .withUserDir(USER_ME, toKeepTime)
+ .withSuffixDir(SUFFIX, toDeleteTime)
+ .withBucketDir(toDeleteTime)
+ .withApps(Lists.newArrayList(
+ new AppDescriptor(toDeleteTime, Lists.newArrayList()),
+ new AppDescriptor(toDeleteTime, Lists.newArrayList(
+ Pair.of(DIR_HOST1, toDeleteTime),
+ Pair.of(DIR_HOST2, toKeepTime))),
+ Pair.of(DIR_HOST2, toDeleteTime))),
+ Pair.of(DIR_HOST2, toKeepTime)))))
+ .withFinishedApps(1, 2, 3)
+ .withRunningApps(4)
+ .injectExceptionForAppDirDeletion(3)
+ .build()
+ .startDeletionService()
+ .verifyAppDirsDeleted(timeout, 1, 3)
+ .verifyAppDirsNotDeleted(timeout, 2, 4)
+ .verifyAppFileDeleted(4, 1, timeout)
+ .verifyAppFileNotDeleted(4, 2, timeout)
+ .teardown(1);
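The long chains above and below come from a fluent test DSL (LogAggregationTestcaseBuilder plus the LogAggregationTestcase it builds). In case the pattern is unfamiliar, a toy sketch of its shape, with hypothetical names and no real verification logic:

import java.util.ArrayList;
import java.util.List;

class DeletionTestcaseSketch {
  private final List<Integer> finishedApps = new ArrayList<>();

  DeletionTestcaseSketch withFinishedApps(int... appIds) {
    for (int id : appIds) {
      finishedApps.add(id);
    }
    return this; // returning 'this' is what enables the chained calls above
  }

  DeletionTestcaseSketch verifyAppDirsDeleted(long timeoutMs, int... appIds) {
    // the real testcase polls a mocked FileSystem here, e.g. with Mockito's
    // verify(fs, timeout(timeoutMs)).delete(appDir, true)
    return this;
  }
}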
- public void testRefreshLogRetentionSettings() throws Exception {
+ void testRefreshLogRetentionSettings() throws Exception {
long before2000Secs = now - (2000 * 1000);
long before50Secs = now - (50 * 1000);
@@ -138,51 +139,51 @@ public class TestAggregatedLogDeletionService {
Configuration conf = setupConfiguration(1800, 1);
LogAggregationTestcase testcase = LogAggregationTestcaseBuilder.create(conf)
- .withUserDir(USER_ME, before50Secs)
- .withSuffixDir(SUFFIX, before50Secs)
- .withBucketDir(before50Secs)
- //Set time last modified of app1Dir directory and its files to before2000Secs
- new AppDescriptor(before2000Secs, Lists.newArrayList(
- Pair.of(DIR_HOST1, before2000Secs))),
- //Set time last modified of app1Dir directory and its files to before50Secs
- new AppDescriptor(before50Secs, Lists.newArrayList(
- Pair.of(DIR_HOST1, before50Secs))))
- )
- .withFinishedApps(1, 2)
- .withRunningApps()
- .build();
+ .withUserDir(USER_ME, before50Secs)
+ .withSuffixDir(SUFFIX, before50Secs)
+ .withBucketDir(before50Secs)
+ //Set time last modified of app1Dir directory and its files to before2000Secs
+ new AppDescriptor(before2000Secs, Lists.newArrayList(
+ Pair.of(DIR_HOST1, before2000Secs))),
+ //Set time last modified of app1Dir directory and its files to before50Secs
+ new AppDescriptor(before50Secs, Lists.newArrayList(
+ Pair.of(DIR_HOST1, before50Secs))))
+ )
+ .withFinishedApps(1, 2)
+ .withRunningApps()
+ .build();
testcase
- //app1Dir would be deleted since it is done above log retention period
- .verifyAppDirDeleted(1, 10000L)
- //app2Dir is not expected to be deleted since it is below the threshold
- .verifyAppDirNotDeleted(2, 3000L);
+ //app1Dir would be deleted since it is done above log retention period
+ .verifyAppDirDeleted(1, 10000L)
+ //app2Dir is not expected to be deleted since it is below the threshold
+ .verifyAppDirNotDeleted(2, 3000L);
//Now, let's change the log aggregation retention configs
conf.setInt(YarnConfiguration.LOG_AGGREGATION_RETAIN_SECONDS, 50);
conf.setInt(YarnConfiguration.LOG_AGGREGATION_RETAIN_CHECK_INTERVAL_SECONDS,
- checkIntervalSeconds);
+ checkIntervalSeconds);
- //We have not called refreshLogSettings, hence don't expect to see
- // the changed conf values
- .verifyCheckIntervalMilliSecondsNotEqualTo(checkIntervalMilliSeconds)
- //refresh the log settings
- .refreshLogRetentionSettings()
- //Check interval time should reflect the new value
- .verifyCheckIntervalMilliSecondsEqualTo(checkIntervalMilliSeconds)
- //app2Dir should be deleted since it falls above the threshold
- .verifyAppDirDeleted(2, 10000L)
- //Close expected 2 times: once for refresh and once for stopping
- .teardown(2);
+ //We have not called refreshLogSettings, hence don't expect to see
+ // the changed conf values
+ .verifyCheckIntervalMilliSecondsNotEqualTo(checkIntervalMilliSeconds)
+ //refresh the log settings
+ .refreshLogRetentionSettings()
+ //Check interval time should reflect the new value
+ .verifyCheckIntervalMilliSecondsEqualTo(checkIntervalMilliSeconds)
+ //app2Dir should be deleted since it falls above the threshold
+ .verifyAppDirDeleted(2, 10000L)
+ //Close expected 2 times: once for refresh and once for stopping
+ .teardown(2);
- public void testCheckInterval() throws Exception {
+ void testCheckInterval() throws Exception {
long toDeleteTime = now - TEN_DAYS_IN_SECONDS * 1000;
@@ -192,32 +193,32 @@ public class TestAggregatedLogDeletionService {
- .withUserDir(USER_ME, now)
- .withSuffixDir(SUFFIX, now)
- .withBucketDir(now)
- new AppDescriptor(now,
- Lists.newArrayList(Pair.of(DIR_HOST1, now))),
- new AppDescriptor(now)))
- .withFinishedApps(1)
- .verifyAnyPathListedAtLeast(4, 10000L)
- .verifyAppDirNotDeleted(1, NO_TIMEOUT)
- // modify the timestamp of the logs and verify if it is picked up quickly
- .changeModTimeOfApp(1, toDeleteTime)
- .changeModTimeOfAppLogDir(1, 1, toDeleteTime)
- .changeModTimeOfBucketDir(toDeleteTime)
- .reinitAllPaths()
+ .withUserDir(USER_ME, now)
+ .withSuffixDir(SUFFIX, now)
+ .withBucketDir(now)
+ new AppDescriptor(now,
+ Lists.newArrayList(Pair.of(DIR_HOST1, now))),
+ new AppDescriptor(now)))
+ .withFinishedApps(1)
+ .verifyAnyPathListedAtLeast(4, 10000L)
+ .verifyAppDirNotDeleted(1, NO_TIMEOUT)
+ // modify the timestamp of the logs and verify if it is picked up quickly
+ .changeModTimeOfApp(1, toDeleteTime)
+ .changeModTimeOfAppLogDir(1, 1, toDeleteTime)
+ .changeModTimeOfBucketDir(toDeleteTime)
+ .reinitAllPaths()
- public void testRobustLogDeletion() throws Exception {
+ void testRobustLogDeletion() throws Exception {
Configuration conf = setupConfiguration(TEN_DAYS_IN_SECONDS, 1);
// prevent us from picking up the same mockfs instance from another test
@@ -225,26 +226,26 @@ public class TestAggregatedLogDeletionService {
long modTime = 0L;
- .withUserDir(USER_ME, modTime)
- .withSuffixDir(SUFFIX, modTime)
- .withBucketDir(modTime, "0")
- new AppDescriptor(modTime),
- new AppDescriptor(modTime, Lists.newArrayList(Pair.of(DIR_HOST1, modTime)))))
- .withAdditionalAppDirs(Lists.newArrayList(Pair.of("application_a", modTime)))
- .withFinishedApps(1, 3)
- .injectExceptionForAppDirDeletion(1)
- .runDeletionTask(TEN_DAYS_IN_SECONDS)
- .verifyAppDirDeleted(3, NO_TIMEOUT);
+ .withUserDir(USER_ME, modTime)
+ .withSuffixDir(SUFFIX, modTime)
+ .withBucketDir(modTime, "0")
+ new AppDescriptor(modTime),
+ new AppDescriptor(modTime, Lists.newArrayList(Pair.of(DIR_HOST1, modTime)))))
+ .withAdditionalAppDirs(Lists.newArrayList(Pair.of("application_a", modTime)))
+ .withFinishedApps(1, 3)
+ .injectExceptionForAppDirDeletion(1)
+ .runDeletionTask(TEN_DAYS_IN_SECONDS)
+ .verifyAppDirDeleted(3, NO_TIMEOUT);
- public void testDeletionTwoControllers() throws IOException {
+ void testDeletionTwoControllers() throws IOException {
@@ -252,48 +253,48 @@ public class TestAggregatedLogDeletionService {
enableFileControllers(conf, REMOTE_ROOT_LOG_DIR, ALL_FILE_CONTROLLERS,
- ALL_FILE_CONTROLLER_NAMES);
+ ALL_FILE_CONTROLLER_NAMES);
- .withBothFileControllers()
- .withApps(//Apps for TFile
- Lists.newArrayList(
- new AppDescriptor(T_FILE, toDeleteTime, Lists.newArrayList()),
- new AppDescriptor(T_FILE, toDeleteTime, Lists.newArrayList(
- //Apps for IFile
- new AppDescriptor(I_FILE, toDeleteTime, Lists.newArrayList()),
- new AppDescriptor(I_FILE, toDeleteTime, Lists.newArrayList(
- .withFinishedApps(1, 2, 3, 5, 6, 7)
- .withRunningApps(4, 8)
- .injectExceptionForAppDirDeletion(3, 6)
- .verifyAppDirsDeleted(timeout, 1, 3, 5, 7)
- .verifyAppDirsNotDeleted(timeout, 2, 4, 6, 8)
- .verifyAppFilesDeleted(timeout, Lists.newArrayList(Pair.of(4, 1), Pair.of(8, 1)))
- .verifyAppFilesNotDeleted(timeout, Lists.newArrayList(Pair.of(4, 2), Pair.of(8, 2)))
+ .withBothFileControllers()
+ .withApps(//Apps for TFile
+ Lists.newArrayList(
+ new AppDescriptor(T_FILE, toDeleteTime, Lists.newArrayList()),
+ new AppDescriptor(T_FILE, toDeleteTime, Lists.newArrayList(
+ //Apps for IFile
+ new AppDescriptor(I_FILE, toDeleteTime, Lists.newArrayList()),
+ new AppDescriptor(I_FILE, toDeleteTime, Lists.newArrayList(
+ .withFinishedApps(1, 2, 3, 5, 6, 7)
+ .withRunningApps(4, 8)
+ .injectExceptionForAppDirDeletion(3, 6)
+ .verifyAppDirsDeleted(timeout, 1, 3, 5, 7)
+ .verifyAppDirsNotDeleted(timeout, 2, 4, 6, 8)
+ .verifyAppFilesDeleted(timeout, Lists.newArrayList(Pair.of(4, 1), Pair.of(8, 1)))
+ .verifyAppFilesNotDeleted(timeout, Lists.newArrayList(Pair.of(4, 2), Pair.of(8, 2)))
static class MockFileSystem extends FilterFileSystem {
@@ -18,10 +18,6 @@
import java.io.BufferedReader;
import java.io.DataInputStream;
@@ -41,9 +37,14 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
+import org.junit.jupiter.api.Assumptions;
import org.apache.hadoop.fs.FileStatus;
@@ -62,10 +63,15 @@ import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.util.Times;
-import org.junit.Assume;
public class TestAggregatedLogFormat {
@@ -84,8 +90,8 @@ public class TestAggregatedLogFormat {
public void cleanupTestDir() throws Exception {
Path workDirPath = new Path(testWorkDir.getAbsolutePath());
LOG.info("Cleaning test directory [" + workDirPath + "]");
@@ -97,7 +103,7 @@ public class TestAggregatedLogFormat {
//appending to logs
- public void testForCorruptedAggregatedLogs() throws Exception {
+ void testForCorruptedAggregatedLogs() throws Exception {
File workDir = new File(testWorkDir, "testReadAcontainerLogs1");
Path remoteAppLogFile =
@@ -112,7 +118,7 @@ public class TestAggregatedLogFormat {
long numChars = 950000;
writeSrcFileAndALog(srcFilePath, "stdout", numChars, remoteAppLogFile,
- srcFileRoot, testContainerId);
+ srcFileRoot, testContainerId);
LogReader logReader = new LogReader(conf, remoteAppLogFile);
LogKey rLogKey = new LogKey();
@@ -121,8 +127,8 @@ public class TestAggregatedLogFormat {
LogReader.readAcontainerLogs(dis, writer);
- if(e.toString().contains("NumberFormatException")) {
- Assert.fail("Aggregated logs are corrupted.");
+ if (e.toString().contains("NumberFormatException")) {
+ fail("Aggregated logs are corrupted.");
@@ -134,10 +140,10 @@ public class TestAggregatedLogFormat {
// Trying to read a corrupted log file created above should cause
// log reading to fail below with an IOException.
logReader = new LogReader(conf, remoteAppLogFile);
- Assert.fail("Expect IOException from reading corrupt aggregated logs.");
+ fail("Expect IOException from reading corrupt aggregated logs.");
DataInputStream dIS = logReader.next(rLogKey);
- Assert.assertNull("Input stream not available for reading", dIS);
+ assertNull(dIS, "Input stream not available for reading");
@@ -198,7 +204,7 @@ public class TestAggregatedLogFormat {
- public void testReadAcontainerLogs1() throws Exception {
+ void testReadAcontainerLogs1() throws Exception {
//Verify the output generated by readAContainerLogs(DataInputStream, Writer, logUploadedTime)
testReadAcontainerLog(true);
@@ -250,12 +256,10 @@ public class TestAggregatedLogFormat {
logWriter.append(logKey, spyLogValue);
// make sure permission are correct on the file
- FileStatus fsStatus = fs.getFileStatus(remoteAppLogFile);
- Assert.assertEquals("permissions on log aggregation file are wrong",
- FsPermission.createImmutable((short) 0640), fsStatus.getPermission());
+ FileStatus fsStatus = fs.getFileStatus(remoteAppLogFile);
+ assertEquals(FsPermission.createImmutable((short) 0640), fsStatus.getPermission(),
+ "permissions on log aggregation file are wrong");
DataInputStream dis = logReader.next(rLogKey);
@@ -283,24 +287,24 @@ public class TestAggregatedLogFormat {
+ numChars + ("\n").length() + ("End of LogType:stdout"
+ System.lineSeparator() + System.lineSeparator()).length();
- Assert.assertTrue("LogType not matched", s.contains("LogType:stdout"));
- Assert.assertTrue("log file:stderr should not be aggregated.", !s.contains("LogType:stderr"));
- Assert.assertTrue("log file:logs should not be aggregated.", !s.contains("LogType:logs"));
- Assert.assertTrue("LogLength not matched", s.contains("LogLength:" + numChars));
- Assert.assertTrue("Log Contents not matched", s.contains("Log Contents"));
+ assertTrue(s.contains("LogType:stdout"), "LogType not matched");
+ assertTrue(!s.contains("LogType:stderr"), "log file:stderr should not be aggregated.");
+ assertTrue(!s.contains("LogType:logs"), "log file:logs should not be aggregated.");
+ assertTrue(s.contains("LogLength:" + numChars), "LogLength not matched");
+ assertTrue(s.contains("Log Contents"), "Log Contents not matched");
StringBuilder sb = new StringBuilder();
for (int i = 0 ; i < numChars ; i++) {
sb.append(filler);
String expectedContent = sb.toString();
- Assert.assertTrue("Log content incorrect", s.contains(expectedContent));
+ assertTrue(s.contains(expectedContent), "Log content incorrect");
- Assert.assertEquals(expectedLength, s.length());
+ assertEquals(expectedLength, s.length());
- public void testZeroLengthLog() throws IOException {
+ void testZeroLengthLog() throws IOException {
File workDir = new File(testWorkDir, "testZeroLength");
Path remoteAppLogFile = new Path(workDir.getAbsolutePath(),
@@ -332,17 +336,18 @@ public class TestAggregatedLogFormat {
Writer writer = new StringWriter();
- Assert.assertEquals("LogType:stdout\n" +
+ assertEquals("LogType:stdout\n" +
"LogLength:0\n" +
"Log Contents:\n\n" +
"End of LogType:stdout\n\n", writer.toString());
- public void testContainerLogsFileAccess() throws IOException {
+ void testContainerLogsFileAccess() throws IOException {
// This test will run only if NativeIO is enabled as SecureIOUtils
// require it to be enabled.
- Assume.assumeTrue(NativeIO.isAvailable());
+ Assumptions.assumeTrue(NativeIO.isAvailable());
"kerberos");
@@ -415,28 +420,28 @@ public class TestAggregatedLogFormat {
String stdoutFile1 =
StringUtils.join(
File.separator,
- Arrays.asList(new String[] {
+ Arrays.asList(new String[]{
workDir.getAbsolutePath(), "srcFiles",
testContainerId1.getApplicationAttemptId().getApplicationId()
- .toString(), testContainerId1.toString(), stderr }));
+ .toString(), testContainerId1.toString(), stderr}));
// The file: stdout is expected to be aggregated.
String stdoutFile2 =
- .toString(), testContainerId1.toString(), stdout }));
+ .toString(), testContainerId1.toString(), stdout}));
String message2 =
"Owner '" + expectedOwner + "' for path "
+ stdoutFile2 + " did not match expected owner '"
+ ugi.getShortUserName() + "'";
- Assert.assertFalse(line.contains(message2));
- Assert.assertFalse(line.contains(data + testContainerId1.toString()
+ assertFalse(line.contains(message2));
+ assertFalse(line.contains(data + testContainerId1.toString()
+ stderr));
- Assert.assertTrue(line.contains(data + testContainerId1.toString()
+ assertTrue(line.contains(data + testContainerId1.toString()
+ stdout));
@@ -28,9 +28,11 @@ import java.util.Arrays;
import javax.servlet.http.HttpServletRequest;
+import com.google.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileUtil;
@@ -49,19 +51,17 @@ import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileCo
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerContext;
import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerFactory;
import org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.TFileAggregatedLogsBlock;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.View.ViewContext;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlockForTest;
import org.apache.hadoop.yarn.webapp.view.BlockForTest;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest;
-import com.google.inject.Inject;
* Test AggregatedLogsBlock. AggregatedLogsBlock should check user, aggregate a
@@ -73,7 +73,7 @@ public class TestAggregatedLogsBlock {
* Bad user. User 'owner' is trying to read logs without access
- public void testAccessDenied() throws Exception {
+ void testAccessDenied() throws Exception {
FileUtil.fullyDelete(new File("target/logs"));
Configuration configuration = getConfiguration();
@@ -89,7 +89,7 @@ public class TestAggregatedLogsBlock {
HtmlBlock.Block block = new BlockForTest(html, printWriter, 10, false);
TFileAggregatedLogsBlockForTest aggregatedBlock
= getTFileAggregatedLogsBlockForTest(configuration, "owner",
- "container_0_0001_01_000001", "localhost:1234");
+ "container_0_0001_01_000001", "localhost:1234");
aggregatedBlock.render(block);
block.getWriter().flush();
@@ -100,7 +100,7 @@ public class TestAggregatedLogsBlock {
- public void testBlockContainsPortNumForUnavailableAppLog() {
+ void testBlockContainsPortNumForUnavailableAppLog() {
@@ -125,7 +125,7 @@ public class TestAggregatedLogsBlock {
- public void testBadLogs() throws Exception {
+ void testBadLogs() throws Exception {
@@ -146,8 +146,8 @@ public class TestAggregatedLogsBlock {
String out = data.toString();
assertTrue(out
.contains("Logs not available for entity. Aggregation may not be "
- + "complete, Check back later or try to find the container logs "
- + "in the local directory of nodemanager localhost:1234"));
+ + "complete, Check back later or try to find the container logs "
+ + "in the local directory of nodemanager localhost:1234"));
.contains("Or see application log at http://localhost:8042"));
@@ -160,7 +160,7 @@ public class TestAggregatedLogsBlock {
- public void testAggregatedLogsBlock() throws Exception {
+ void testAggregatedLogsBlock() throws Exception {
@@ -175,7 +175,7 @@ public class TestAggregatedLogsBlock {
= getTFileAggregatedLogsBlockForTest(configuration, "admin",
@@ -192,7 +192,7 @@ public class TestAggregatedLogsBlock {
- public void testAggregatedLogsBlockHar() throws Exception {
+ void testAggregatedLogsBlockHar() throws Exception {
@@ -209,7 +209,7 @@ public class TestAggregatedLogsBlock {
- "container_1440536969523_0001_01_000001", "host1:1111");
+ "container_1440536969523_0001_01_000001", "host1:1111");
@@ -238,7 +238,7 @@ public class TestAggregatedLogsBlock {
- public void testNoLogs() throws Exception {
+ void testNoLogs() throws Exception {
@@ -255,7 +255,7 @@ public class TestAggregatedLogsBlock {
@@ -17,8 +17,6 @@
import java.io.FileWriter;
@@ -27,6 +25,7 @@ import java.util.ArrayList;
@@ -39,6 +38,8 @@ import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileCo
* This class contains several utility functions for log aggregation tests.
@@ -18,19 +18,6 @@
-import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.logaggregation.filecontroller.FakeLogAggregationFileController;
import java.time.Clock;
@@ -42,7 +29,23 @@ import java.util.Map;
import java.util.stream.Collectors;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.logaggregation.filecontroller.FakeLogAggregationFileController;
public class TestLogAggregationMetaCollector {
private static final String TEST_NODE = "TEST_NODE_1";
@@ -133,17 +136,17 @@ public class TestLogAggregationMetaCollector {
public void setUp() throws Exception {
fileController = createFileController();
- public void testAllNull() throws IOException {
+ void testAllNull() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
@@ -165,7 +168,7 @@ public class TestLogAggregationMetaCollector {
- public void testAllSet() throws IOException {
+ void testAllSet() throws IOException {
Set<String> fileSizeExpressions = new HashSet<>();
@@ -191,7 +194,7 @@ public class TestLogAggregationMetaCollector {
- public void testSingleNodeRequest() throws IOException {
+ void testSingleNodeRequest() throws IOException {
@@ -214,7 +217,7 @@ public class TestLogAggregationMetaCollector {
- public void testMultipleNodeRegexRequest() throws IOException {
+ void testMultipleNodeRegexRequest() throws IOException {
@@ -236,7 +239,7 @@ public class TestLogAggregationMetaCollector {
- public void testMultipleFileRegex() throws IOException {
+ void testMultipleFileRegex() throws IOException {
@@ -260,7 +263,7 @@ public class TestLogAggregationMetaCollector {
- public void testContainerIdExactMatch() throws IOException {
+ void testContainerIdExactMatch() throws IOException {
@@ -284,7 +287,7 @@ public class TestLogAggregationMetaCollector {
- public void testMultipleFileBetweenSize() throws IOException {
+ void testMultipleFileBetweenSize() throws IOException {
@@ -311,7 +314,7 @@ public class TestLogAggregationMetaCollector {
- public void testInvalidQueryStrings() throws IOException {
+ void testInvalidQueryStrings() throws IOException {
@@ -18,6 +18,11 @@
package org.apache.hadoop.yarn.logaggregation.filecontroller;
+import java.io.OutputStream;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
@@ -28,11 +33,6 @@ import org.apache.hadoop.yarn.logaggregation.ContainerLogsRequest;
import org.apache.hadoop.yarn.webapp.View;
-import java.io.OutputStream;
public class FakeLogAggregationFileController
extends LogAggregationFileController {
@@ -18,6 +18,13 @@
+import java.io.FileNotFoundException;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mockito;
@@ -25,15 +32,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.mockito.ArgumentMatcher;
-import org.mockito.Mockito;
-import java.io.FileNotFoundException;
import static org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileController.TLDIR_PERMISSIONS;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
@@ -49,19 +50,19 @@ import static org.mockito.Mockito.verify;
public class TestLogAggregationFileController {
- public void testRemoteDirCreationDefault() throws Exception {
+ void testRemoteDirCreationDefault() throws Exception {
FileSystem fs = mock(FileSystem.class);
doReturn(new URI("")).when(fs).getUri();
doThrow(FileNotFoundException.class).when(fs)
- .getFileStatus(any(Path.class));
+ .getFileStatus(any(Path.class));
LogAggregationFileController controller = mock(
- LogAggregationFileController.class, Mockito.CALLS_REAL_METHODS);
+ LogAggregationFileController.class, Mockito.CALLS_REAL_METHODS);
doReturn(fs).when(controller).getFileSystem(any(Configuration.class));
UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
- "yarn_user", new String[] {"yarn_group", "other_group"});
+ "yarn_user", new String[]{"yarn_group", "other_group"});
UserGroupInformation.setLoginUser(ugi);
controller.initialize(conf, "TFile");
@@ -71,7 +72,7 @@ public class TestLogAggregationFileController {
- public void testRemoteDirCreationWithCustomGroup() throws Exception {
+ void testRemoteDirCreationWithCustomGroup() throws Exception {
String testGroupName = "testGroup";
@@ -86,7 +87,7 @@ public class TestLogAggregationFileController {
@@ -114,7 +115,7 @@ public class TestLogAggregationFileController {
- public void testRemoteDirCreationWithCustomUser() throws Exception {
+ void testRemoteDirCreationWithCustomUser() throws Exception {
doReturn(new FileStatus(128, false, 0, 64, System.currentTimeMillis(),
@@ -139,6 +140,6 @@ public class TestLogAggregationFileController {
verify(fs).setPermission(argThat(new PathContainsString(".permission_check")),
eq(new FsPermission(TLDIR_PERMISSIONS)));
verify(fs).delete(argThat(new PathContainsString(".permission_check")), eq(false));
- Assert.assertTrue(controller.fsSupportsChmod);
+ assertTrue(controller.fsSupportsChmod);
@@ -18,6 +18,21 @@
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
import org.apache.hadoop.conf.Configured;
@@ -33,25 +48,14 @@ import org.apache.hadoop.yarn.logaggregation.filecontroller.ifile.LogAggregation
import org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.Writer;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.LOG_AGGREGATION_FILE_FORMATS;
import static org.apache.hadoop.yarn.logaggregation.LogAggregationTestUtils.REMOTE_LOG_ROOT;
+import static org.junit.jupiter.api.Assertions.assertThrows;
* Test LogAggregationFileControllerFactory.
@@ -79,7 +83,7 @@ public class TestLogAggregationFileControllerFactory extends Configured {
private ApplicationId appId = ApplicationId.newInstance(
System.currentTimeMillis(), 1);
public void setup() throws IOException {
conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
@@ -107,100 +111,106 @@ public class TestLogAggregationFileControllerFactory extends Configured {
new FileWriter(new File(logPath.toString(), "testLog"))) {
writer.write("test");
- assertTrue("The used LogAggregationFileController is not instance of "
- + className.getSimpleName(), className.isInstance(
- factory.getFileControllerForRead(appId, APP_OWNER)));
+ assertTrue(className.isInstance(factory.getFileControllerForRead(appId, APP_OWNER)),
+ "The used LogAggregationFileController is not instance of " + className.getSimpleName());
fs.delete(logPath, true);
- public void testDefaultLogAggregationFileControllerFactory()
+ void testDefaultLogAggregationFileControllerFactory()
throws IOException {
LogAggregationFileControllerFactory factory =
new LogAggregationFileControllerFactory(getConf());
List<LogAggregationFileController> list = factory
.getConfiguredLogAggregationFileControllerList();
- assertEquals("Only one LogAggregationFileController is expected!", 1,
- list.size());
- assertTrue("TFile format is expected to be the first " +
- "LogAggregationFileController!", list.get(0) instanceof
- LogAggregationTFileController);
- assertTrue("TFile format is expected to be used for writing!",
- factory.getFileControllerForWrite() instanceof
- LogAggregationTFileController);
+ assertEquals(1,
+ list.size(),
+ "Only one LogAggregationFileController is expected!");
+ assertTrue(list.get(0) instanceof
+ LogAggregationTFileController, "TFile format is expected to be the first " +
+ "LogAggregationFileController!");
+ assertTrue(factory.getFileControllerForWrite() instanceof
+ LogAggregationTFileController,
+ "TFile format is expected to be used for writing!");
verifyFileControllerInstance(factory, LogAggregationTFileController.class);
- @Test(expected = Exception.class)
- public void testLogAggregationFileControllerFactoryClassNotSet() {
- Configuration conf = getConf();
- conf.set(LOG_AGGREGATION_FILE_FORMATS, "TestLogAggregationFileController");
- new LogAggregationFileControllerFactory(conf);
- fail("TestLogAggregationFileController's class was not set, " +
- "but the factory creation did not fail.");
+ void testLogAggregationFileControllerFactoryClassNotSet() {
+ assertThrows(Exception.class, () -> {
+ Configuration conf = getConf();
+ conf.set(LOG_AGGREGATION_FILE_FORMATS, "TestLogAggregationFileController");
+ new LogAggregationFileControllerFactory(conf);
+ fail("TestLogAggregationFileController's class was not set, " +
+ "but the factory creation did not fail.");
+ });
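The hunk above is the one migration step that is not mechanical: @Test(expected = ...) has no JUnit 5 counterpart, so the throwing call moves into an assertThrows lambda, which also returns the exception for follow-up assertions. A hedged sketch with a hypothetical stand-in for the factory call:

import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

class ExpectedExceptionSketch {
  @Test
  void creationFailsForUnknownFormat() {
    Exception e = assertThrows(Exception.class,
        () -> configureFactory("TestLogAggregationFileController"));
    // unlike @Test(expected = ...), the thrown instance is available here
    System.out.println("factory rejected bad format: " + e.getMessage());
  }

  private static void configureFactory(String format) throws Exception {
    // stand-in for new LogAggregationFileControllerFactory(conf)
    throw new IllegalArgumentException("no class configured for format " + format);
  }
}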
- public void testLogAggregationFileControllerFactory() throws Exception {
+ void testLogAggregationFileControllerFactory() throws Exception {
enableFileControllers(getConf(), ALL_FILE_CONTROLLERS, ALL_FILE_CONTROLLER_NAMES);
List<LogAggregationFileController> list =
factory.getConfiguredLogAggregationFileControllerList();
- assertEquals("The expected number of LogAggregationFileController " +
- "is not 3!", 3, list.size());
- assertTrue("Test format is expected to be the first " +
- TestLogAggregationFileController);
- assertTrue("IFile format is expected to be the second " +
- "LogAggregationFileController!", list.get(1) instanceof
- LogAggregationIndexedFileController);
- "LogAggregationFileController!", list.get(2) instanceof
- assertTrue("Test format is expected to be used for writing!",
+ assertEquals(3, list.size(), "The expected number of LogAggregationFileController " +
+ "is not 3!");
+ TestLogAggregationFileController, "Test format is expected to be the first " +
+ assertTrue(list.get(1) instanceof
+ LogAggregationIndexedFileController, "IFile format is expected to be the second " +
+ assertTrue(list.get(2) instanceof
+ TestLogAggregationFileController,
+ "Test format is expected to be used for writing!");
verifyFileControllerInstance(factory,
TestLogAggregationFileController.class);
- public void testClassConfUsed() {
+ void testClassConfUsed() {
enableFileControllers(getConf(), Collections.singletonList(LogAggregationTFileController.class),
Collections.singletonList("TFile"));
LogAggregationFileController fc = factory.getFileControllerForWrite();
- assertEquals(WRONG_ROOT_LOG_DIR_MSG, "target/app-logs/TFile",
- fc.getRemoteRootLogDir().toString());
- assertEquals(WRONG_ROOT_LOG_DIR_SUFFIX_MSG, "TFile",
- fc.getRemoteRootLogDirSuffix());
+ assertEquals("target/app-logs/TFile",
+ fc.getRemoteRootLogDir().toString(),
+ WRONG_ROOT_LOG_DIR_MSG);
+ assertEquals("TFile",
+ fc.getRemoteRootLogDirSuffix(),
+ WRONG_ROOT_LOG_DIR_SUFFIX_MSG);
- public void testNodemanagerConfigurationIsUsed() {
+ void testNodemanagerConfigurationIsUsed() {
Configuration conf = getConf();
conf.set(LOG_AGGREGATION_FILE_FORMATS, "TFile");
new LogAggregationFileControllerFactory(conf);
- assertEquals(WRONG_ROOT_LOG_DIR_MSG, "target/app-logs/default",
- assertEquals(WRONG_ROOT_LOG_DIR_SUFFIX_MSG, "log-tfile",
+ assertEquals("target/app-logs/default",
+ assertEquals("log-tfile",
- public void testDefaultConfUsed() {
+ void testDefaultConfUsed() {
conf.unset(YarnConfiguration.NM_REMOTE_APP_LOG_DIR);
conf.unset(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX);
@@ -210,10 +220,12 @@ public class TestLogAggregationFileControllerFactory extends Configured {
- assertEquals(WRONG_ROOT_LOG_DIR_MSG, "/tmp/logs",
- assertEquals(WRONG_ROOT_LOG_DIR_SUFFIX_MSG, "logs-tfile",
+ assertEquals("/tmp/logs",
+ assertEquals("logs-tfile",
private static class TestLogAggregationFileController
@@ -32,6 +32,12 @@ import java.util.HashSet;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -48,27 +54,24 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
+import org.apache.hadoop.yarn.logaggregation.ContainerLogFileInfo;
import org.apache.hadoop.yarn.logaggregation.ContainerLogMeta;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRequest;
import org.apache.hadoop.yarn.logaggregation.ExtendedLogMetaRequest;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
-import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
-import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
-import org.apache.hadoop.yarn.logaggregation.ContainerLogFileInfo;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.ControlledClock;
import static org.mockito.Mockito.when;
@@ -104,7 +107,7 @@ public class TestLogAggregationIndexedFileController
public void setUp() throws IOException {
setConf(getTestConf());
appId = ApplicationId.newInstance(123456, 1);
@@ -122,14 +125,15 @@ public class TestLogAggregationIndexedFileController
System.setErr(sysErr);
public void teardown() throws Exception {
fs.delete(rootLocalLogDirPath, true);
fs.delete(new Path(remoteLogDir), true);
- @Test(timeout = 15000)
- public void testLogAggregationIndexFileFormat() throws Exception {
+ @Timeout(15000)
+ void testLogAggregationIndexFileFormat() throws Exception {
if (fs.exists(rootLocalLogDirPath)) {
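One caveat reviewers may want to check in the hunk above: JUnit 4's timeout attribute is in milliseconds, but org.junit.jupiter.api.Timeout defaults to seconds, so @Timeout(15000) grants a much longer budget than @Test(timeout = 15000) did. A sketch of the unit-preserving form:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutSketch {
  @Test
  @Timeout(value = 15000, unit = TimeUnit.MILLISECONDS) // matches timeout = 15000 in JUnit 4
  void boundedTest() throws InterruptedException {
    Thread.sleep(10); // finishes comfortably inside the limit
  }
}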
@@ -150,7 +154,7 @@ public class TestLogAggregationIndexedFileController
LogKey key1 = new LogKey(containerId.toString());
- for(String logType : logTypes) {
+ for (String logType : logTypes) {
File file = createAndWriteLocalLogFile(containerId, appLogsDir,
logType);
files.add(file);
@@ -162,24 +166,23 @@ public class TestLogAggregationIndexedFileController
final ControlledClock clock = new ControlledClock();
clock.setTime(System.currentTimeMillis());
- LogAggregationIndexedFileController fileFormat
- = new LogAggregationIndexedFileController() {
- private int rollOverCheck = 0;
- @Override
- public Clock getSystemClock() {
- return clock;
- }
- public boolean isRollover(final FileContext fc,
- final Path candidate) throws IOException {
- rollOverCheck++;
- if (rollOverCheck >= 3) {
- return true;
- }
- return false;
- }
- };
+ LogAggregationIndexedFileController fileFormat = new LogAggregationIndexedFileController() {
+ private int rollOverCheck = 0;
+ @Override
+ public Clock getSystemClock() {
+ return clock;
+ }
+ public boolean isRollover(final FileContext fc, final Path candidate) throws IOException {
+ rollOverCheck++;
+ if (rollOverCheck >= 3) {
+ return true;
+ }
+ return false;
+ }
+ };
fileFormat.initialize(getConf(), "Indexed");
@@ -238,7 +241,7 @@ public class TestLogAggregationIndexedFileController
factoryConf.set("yarn.log-aggregation.file-formats", "Indexed");
factoryConf.set("yarn.log-aggregation.file-controller.Indexed.class",
"org.apache.hadoop.yarn.logaggregation.filecontroller.ifile"
- + ".LogAggregationIndexedFileController");
+ + ".LogAggregationIndexedFileController");
new LogAggregationFileControllerFactory(factoryConf);
LogAggregationFileController fileController = factory
@@ -255,9 +258,9 @@ public class TestLogAggregationIndexedFileController
// create a checksum file
Path checksumFile = new Path(fileFormat.getRemoteAppLogDir(
- appId, USER_UGI.getShortUserName()),
+ appId, USER_UGI.getShortUserName()),
LogAggregationUtils.getNodeString(nodeId)
- + LogAggregationIndexedFileController.CHECK_SUM_FILE_SUFFIX);
+ + LogAggregationIndexedFileController.CHECK_SUM_FILE_SUFFIX);
FSDataOutputStream fInput = null;
String nodeName = logPath.getName() + "_" + clock.getTime();
@@ -330,7 +333,7 @@ public class TestLogAggregationIndexedFileController
fileFormat.postWrite(context);
fileFormat.closeWriter();
meta = fileFormat.readAggregatedLogsMeta(
- logRequest);
+ logRequest);
assertThat(meta.size()).isEqualTo(2);
for (ContainerLogMeta log : meta) {
assertEquals(containerId.toString(), log.getContainerId());
@@ -380,8 +383,9 @@ public class TestLogAggregationIndexedFileController
sysOutStream.reset();
- public void testFetchApplictionLogsHar() throws Exception {
+ void testFetchApplictionLogsHar() throws Exception {
List<String> newLogTypes = new ArrayList<>();
newLogTypes.add("syslog");
newLogTypes.add("stdout");
@@ -472,7 +476,7 @@ public class TestLogAggregationIndexedFileController
- public void testGetRollOverLogMaxSize() {
+ void testGetRollOverLogMaxSize() {
String fileControllerName = "testController";
String remoteDirConf = String.format(
YarnConfiguration.LOG_AGGREGATION_REMOTE_APP_LOG_DIR_FMT,
@@ -500,7 +504,7 @@ public class TestLogAggregationIndexedFileController
- public void testGetLogMetaFilesOfNode() throws Exception {
+ void testGetLogMetaFilesOfNode() throws Exception {
@@ -521,7 +525,7 @@ public class TestLogAggregationIndexedFileController
@@ -566,7 +570,7 @@ public class TestLogAggregationIndexedFileController
+ LogAggregationIndexedFileController.CHECK_SUM_FILE_SUFFIX);
@@ -593,11 +597,11 @@ public class TestLogAggregationIndexedFileController
if (node.getPath().getName().contains(
LogAggregationIndexedFileController.CHECK_SUM_FILE_SUFFIX)) {
- assertTrue("Checksum node files should not contain any logs",
- metas.isEmpty());
+ assertTrue(metas.isEmpty(),
+ "Checksum node files should not contain any logs");
- assertFalse("Non-checksum node files should contain log files",
+ assertFalse(metas.isEmpty(),
+ "Non-checksum node files should contain log files");
assertEquals(4, metas.values().stream().findFirst().get().size());
@@ -18,14 +18,14 @@
package org.apache.hadoop.yarn.logaggregation.testutils;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;
import static org.apache.hadoop.yarn.logaggregation.testutils.MockRMClientUtils.createMockRMClient;
public class AggregatedLogDeletionServiceForTest extends AggregatedLogDeletionService {
@@ -18,6 +18,19 @@
+import java.io.Closeable;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.stream.Collectors;
@@ -30,26 +43,20 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService.LogDeletionTask;
-import java.io.Closeable;
-import java.util.HashSet;
-import java.util.Objects;
-import java.util.Set;
-import java.util.stream.Collectors;
-import static org.apache.hadoop.yarn.logaggregation.testutils.FileStatusUtils.*;
+import static org.apache.hadoop.yarn.logaggregation.testutils.FileStatusUtils.createDirBucketDirLogPathWithFileStatus;
+import static org.apache.hadoop.yarn.logaggregation.testutils.FileStatusUtils.createDirLogPathWithFileStatus;
+import static org.apache.hadoop.yarn.logaggregation.testutils.FileStatusUtils.createFileLogPathWithFileStatus;
+import static org.apache.hadoop.yarn.logaggregation.testutils.FileStatusUtils.createPathWithFileStatusForAppId;
import static org.apache.hadoop.yarn.logaggregation.testutils.LogAggregationTestcaseBuilder.NO_TIMEOUT;
+import static org.mockito.Mockito.timeout;
public class LogAggregationTestcase {
private static final Logger LOG = LoggerFactory.getLogger(LogAggregationTestcase.class);
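The static import of Mockito's timeout above enables asynchronous verification: verify(mock, timeout(ms)) polls until the expected interaction happens or the deadline passes, which is how these testcases wait on the background deletion service. A minimal sketch with a hypothetical mock:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;

import java.util.List;

class AsyncVerifySketch {
  @SuppressWarnings("unchecked")
  static void verifyEventually() {
    List<String> sink = mock(List.class);
    new Thread(() -> sink.add("done")).start();
    // passes as soon as add("done") is observed, fails if 2 seconds elapse first
    verify(sink, timeout(2000)).add("done");
  }
}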
import org.apache.commons.compress.utils.Lists;
@@ -27,12 +33,6 @@ import org.apache.hadoop.security.AccessControlException;
import static org.apache.hadoop.yarn.logaggregation.TestAggregatedLogDeletionService.ALL_FILE_CONTROLLER_NAMES;
public class LogAggregationTestcaseBuilder {
import org.apache.hadoop.test.MockitoUtil;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -26,8 +28,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -25,19 +25,23 @@ import java.util.Map;
import java.util.Map.Entry;
public class NodeLabelTestBase {
public static void assertMapEquals(Map<NodeId, Set<String>> expected,
ImmutableMap<NodeId, Set<String>> actual) {
- Assert.assertEquals(expected.size(), actual.size());
+ assertEquals(expected.size(), actual.size());
for (NodeId k : expected.keySet()) {
- Assert.assertTrue(actual.containsKey(k));
+ assertTrue(actual.containsKey(k));
assertCollectionEquals(expected.get(k), actual.get(k));
@@ -45,9 +49,9 @@ public class NodeLabelTestBase {
public static void assertLabelInfoMapEquals(
Map<NodeId, Set<NodeLabel>> expected,
ImmutableMap<NodeId, Set<NodeLabel>> actual) {
assertNLCollectionEquals(expected.get(k), actual.get(k));
@@ -55,13 +59,13 @@ public class NodeLabelTestBase {
public static void assertLabelsToNodesEquals(
Map<String, Set<NodeId>> expected,
ImmutableMap<String, Set<NodeId>> actual) {
for (String k : expected.keySet()) {
Set<NodeId> expectedS1 = new HashSet<>(expected.get(k));
Set<NodeId> actualS2 = new HashSet<>(actual.get(k));
- Assert.assertEquals(expectedS1, actualS2);
- Assert.assertTrue(expectedS1.containsAll(actualS2));
+ assertEquals(expectedS1, actualS2);
+ assertTrue(expectedS1.containsAll(actualS2));
@@ -86,7 +90,7 @@ public class NodeLabelTestBase {
public static void assertMapContains(Map<NodeId, Set<String>> expected,
for (NodeId k : actual.keySet()) {
- Assert.assertTrue(expected.containsKey(k));
+ assertTrue(expected.containsKey(k));
@@ -94,28 +98,28 @@ public class NodeLabelTestBase {
public static void assertCollectionEquals(Collection<String> expected,
Collection<String> actual) {
if (expected == null) {
- Assert.assertNull(actual);
+ assertNull(actual);
- Assert.assertNotNull(actual);
+ assertNotNull(actual);
Set<String> expectedSet = new HashSet<>(expected);
Set<String> actualSet = new HashSet<>(actual);
- Assert.assertEquals(expectedSet, actualSet);
- Assert.assertTrue(expectedSet.containsAll(actualSet));
+ assertEquals(expectedSet, actualSet);
+ assertTrue(expectedSet.containsAll(actualSet));
public static void assertNLCollectionEquals(Collection<NodeLabel> expected,
Collection<NodeLabel> actual) {
Set<NodeLabel> expectedSet = new HashSet<>(expected);
Set<NodeLabel> actualSet = new HashSet<>(actual);
@SuppressWarnings("unchecked")
@@ -150,13 +154,13 @@ public class NodeLabelTestBase {
public static void assertLabelsInfoToNodesEquals(
Map<NodeLabel, Set<NodeId>> expected,
ImmutableMap<NodeLabel, Set<NodeId>> actual) {
for (NodeLabel k : expected.keySet()) {
@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.nodelabels;
@@ -27,24 +25,31 @@ import java.util.HashSet;
public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
DummyCommonNodeLabelsManager mgr = null;
public void before() {
mgr = new DummyCommonNodeLabelsManager();
@@ -53,13 +58,14 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
mgr.start();
public void after() {
mgr.stop();
- @Test(timeout = 5000)
- public void testAddRemovelabel() throws Exception {
+ @Timeout(5000)
+ void testAddRemovelabel() throws Exception {
// Add some label
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("hello"));
verifyNodeLabelAdded(Sets.newHashSet("hello"), mgr.lastAddedlabels);
@@ -68,23 +74,23 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("hello1", "world1"));
verifyNodeLabelAdded(Sets.newHashSet("hello1", "world1"), mgr.lastAddedlabels);
- Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
+ assertTrue(mgr.getClusterNodeLabelNames().containsAll(
Sets.newHashSet("hello", "world", "hello1", "world1")));
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("hello1",
false)));
- Assert.fail("IOException not thrown on exclusivity change of labels");
+ fail("IOException not thrown on exclusivity change of labels");
- Assert.assertTrue("IOException is expected when exclusivity is modified",
- e instanceof IOException);
+ assertTrue(e instanceof IOException,
+ "IOException is expected when exclusivity is modified");
true)));
- Assert.assertFalse(
- "IOException not expected when no change in exclusivity",
+ assertFalse(
+ e instanceof IOException,
+ "IOException not expected when no change in exclusivity");
// try to remove null, empty and non-existed label, should fail
for (String p : Arrays.asList(null, CommonNodeLabelsManager.NO_LABEL, "xx")) {
@@ -94,42 +100,45 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
caught = true;
- Assert.assertTrue("remove label should fail "
- + "when label is null/empty/non-existed", caught);
+ assertTrue(caught, "remove label should fail "
+ + "when label is null/empty/non-existed");
// Remove some label
mgr.removeFromClusterNodeLabels(Arrays.asList("hello"));
assertCollectionEquals(Sets.newHashSet("hello"), mgr.lastRemovedlabels);
Arrays.asList("world", "hello1", "world1")));
mgr.removeFromClusterNodeLabels(Arrays
.asList("hello1", "world1", "world"));
- Assert.assertTrue(mgr.lastRemovedlabels.containsAll(Sets.newHashSet(
+ assertTrue(mgr.lastRemovedlabels.containsAll(Sets.newHashSet(
"hello1", "world1", "world")));
- Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty());
+ assertTrue(mgr.getClusterNodeLabelNames().isEmpty());
- public void testAddlabelWithCase() throws Exception {
+ void testAddlabelWithCase() throws Exception {
// Add some label, case will not ignore here
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("HeLlO"));
verifyNodeLabelAdded(Sets.newHashSet("HeLlO"), mgr.lastAddedlabels);
- Assert.assertFalse(mgr.getClusterNodeLabelNames().containsAll(
+ assertFalse(mgr.getClusterNodeLabelNames().containsAll(
Arrays.asList("hello")));
- public void testAddlabelWithExclusivity() throws Exception {
+ void testAddlabelWithExclusivity() throws Exception {
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("a", false), NodeLabel.newInstance("b", true)));
- Assert.assertFalse(mgr.isExclusiveNodeLabel("a"));
- Assert.assertTrue(mgr.isExclusiveNodeLabel("b"));
+ assertFalse(mgr.isExclusiveNodeLabel("a"));
+ assertTrue(mgr.isExclusiveNodeLabel("b"));
- public void testAddInvalidlabel() throws IOException {
+ void testAddInvalidlabel() throws IOException {
boolean caught = false;
Set<String> set = new HashSet<String>();
@@ -138,7 +147,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("null label should not add to repo", caught);
+ assertTrue(caught, "null label should not add to repo");
caught = false;
@@ -147,7 +156,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("empty label should not add to repo", caught);
+ assertTrue(caught, "empty label should not add to repo");
@@ -155,7 +164,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("invalid label character should not add to repo", caught);
+ assertTrue(caught, "invalid label character should not add to repo");
@@ -163,7 +172,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("too long label should not add to repo", caught);
+ assertTrue(caught, "too long label should not add to repo");
@@ -171,7 +180,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("label cannot start with \"-\"", caught);
+ assertTrue(caught, "label cannot start with \"-\"");
@@ -179,28 +188,29 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("label cannot start with \"_\"", caught);
+ assertTrue(caught, "label cannot start with \"_\"");
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("a^aabbb"));
- Assert.assertTrue("label cannot contains other chars like ^[] ...", caught);
+ assertTrue(caught, "label cannot contains other chars like ^[] ...");
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("aa[a]bbb"));
- public void testAddReplaceRemoveLabelsOnNodes() throws Exception {
+ void testAddReplaceRemoveLabelsOnNodes() throws Exception {
// set a label on a node, but label doesn't exist
@@ -208,8 +218,8 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("trying to set a label to a node but "
- + "label doesn't exist in repository should fail", caught);
+ assertTrue(caught, "trying to set a label to a node but "
+ + "label doesn't exist in repository should fail");
// set a label on a node, but node is null or empty
@@ -218,7 +228,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("trying to add a empty node but succeeded", caught);
+ assertTrue(caught, "trying to add a empty node but succeeded");
// set node->label one by one
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
@@ -263,15 +273,16 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
// remove labels on node
mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
toNodeId("n2"), toSet("p3"), toNodeId("n3"), toSet("p3")));
- Assert.assertEquals(0, mgr.getNodeLabels().size());
+ assertEquals(0, mgr.getNodeLabels().size());
assertMapEquals(mgr.lastNodeToLabels, ImmutableMap.of(toNodeId("n1"),
CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n2"),
CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n3"),
CommonNodeLabelsManager.EMPTY_STRING_SET));
- public void testRemovelabelWithNodes() throws Exception {
+ void testRemovelabelWithNodes() throws Exception {
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p2")));
@@ -283,21 +294,23 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
assertCollectionEquals(Arrays.asList("p1"), mgr.lastRemovedlabels);
mgr.removeFromClusterNodeLabels(ImmutableSet.of("p2", "p3"));
- Assert.assertTrue(mgr.getNodeLabels().isEmpty());
+ assertTrue(mgr.getNodeLabels().isEmpty());
assertCollectionEquals(Arrays.asList("p2", "p3"), mgr.lastRemovedlabels);
- public void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException {
+ void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException {
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1"));
assertCollectionEquals(toSet("p1"), mgr.getClusterNodeLabelNames());
mgr.removeFromClusterNodeLabels(toSet("p1 "));
- public void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException {
+ void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException {
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1", "p2"));
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1 ")));
assertMapEquals(
@@ -308,49 +321,51 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
mgr.getNodeLabels(),
ImmutableMap.of(toNodeId("n1"), toSet("p2")));
mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet(" p2 ")));
- public void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo()
+ void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo()
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
ImmutableMap.of(toNodeId("n1"), toSet("p1")));
// Replace labels on n1:1 to P2
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"),
toNodeId("n1:2"), toSet("p2")));
assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"),
toSet("p1"), toNodeId("n1:1"), toSet("p2"), toNodeId("n1:2"),
toSet("p2")));
// Replace labels on n1 to P1, both n1:1/n1 will be P1 now
toSet("p1"), toNodeId("n1:1"), toSet("p1"), toNodeId("n1:2"),
toSet("p1")));
// Set labels on n1:1 to P2 again to verify if add/remove works
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2")));
private void assertNodeLabelsDisabledErrorMessage(IOException e) {
- Assert.assertEquals(CommonNodeLabelsManager.NODE_LABELS_NOT_ENABLED_ERR,
+ assertEquals(CommonNodeLabelsManager.NODE_LABELS_NOT_ENABLED_ERR,
e.getMessage());
- public void testNodeLabelsDisabled() throws IOException {
+ void testNodeLabelsDisabled() throws IOException {
DummyCommonNodeLabelsManager mgr = new DummyCommonNodeLabelsManager();
conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, false);
mgr.init(conf);
// add labels
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x"));
@@ -359,9 +374,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
// check exception caught
- Assert.assertTrue(caught);
+ assertTrue(caught);
// remove labels
mgr.removeFromClusterNodeLabels(ImmutableSet.of("x"));
@@ -370,9 +385,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
// add labels to node
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("host", 0),
@@ -382,9 +397,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
// remove labels from node
mgr.removeLabelsFromNode(ImmutableMap.of(NodeId.newInstance("host", 0),
@@ -394,9 +409,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
// replace labels on node
mgr.replaceLabelsOnNode(ImmutableMap.of(NodeId.newInstance("host", 0),
@@ -406,14 +421,15 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
mgr.close();
- public void testLabelsToNodes()
+ void testLabelsToNodes()
@@ -421,7 +437,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
assertLabelsToNodesEquals(
labelsToNodes,
ImmutableMap.of(
- "p1", toSet(toNodeId("n1"))));
+ "p1", toSet(toNodeId("n1"))));
labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels()));
@@ -432,8 +448,8 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p1", toSet(toNodeId("n1")),
- "p2", toSet(toNodeId("n1:1"),toNodeId("n1:2"))));
+ "p1", toSet(toNodeId("n1")),
+ "p2", toSet(toNodeId("n1:1"), toNodeId("n1:2"))));
@@ -443,7 +459,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p1", toSet(toNodeId("n1"),toNodeId("n1:1"),toNodeId("n1:2"))));
+ "p1", toSet(toNodeId("n1"), toNodeId("n1:1"), toNodeId("n1:2"))));
@@ -455,9 +471,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p1", toSet(toNodeId("n1"),toNodeId("n1:2")),
- "p2", toSet(toNodeId("n1:1")),
- "p3", toSet(toNodeId("n2"))));
+ "p1", toSet(toNodeId("n1"), toNodeId("n1:2")),
+ "p2", toSet(toNodeId("n1:1")),
+ "p3", toSet(toNodeId("n2"))));
@@ -467,20 +483,21 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p2", toSet(toNodeId("n1:1"))));
+ "p2", toSet(toNodeId("n1:1"))));
- public void testLabelsToNodesForSelectedLabels()
+ void testLabelsToNodesForSelectedLabels()
mgr.addLabelsToNode(
- toNodeId("n1:1"), toSet("p1"),
- toNodeId("n1:2"), toSet("p2")));
+ toNodeId("n1:1"), toSet("p1"),
+ toNodeId("n1:2"), toSet("p2")));
Set<String> setlabels =
new HashSet<String>(Arrays.asList(new String[]{"p1"}));
assertLabelsToNodesEquals(mgr.getLabelsToNodes(setlabels),
@@ -493,14 +510,14 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
mgr.getLabelsToNodes(setlabels),
- "p3", toSet(toNodeId("n1"), toNodeId("n1:1"),toNodeId("n1:2"))));
+ "p3", toSet(toNodeId("n1"), toNodeId("n1:1"), toNodeId("n1:2"))));
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p2")));
- "p2", toSet(toNodeId("n2")),
+ "p2", toSet(toNodeId("n2")),
mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p3")));
setlabels =
@@ -508,29 +525,30 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p2", toSet(toNodeId("n2"))));
+ "p2", toSet(toNodeId("n2"))));
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n3"), toSet("p1")));
- "p1", toSet(toNodeId("n3")),
+ "p1", toSet(toNodeId("n3")),
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2:2"), toSet("p3")));
- "p3", toSet(toNodeId("n2:2"))));
+ "p3", toSet(toNodeId("n2:2"))));
setlabels = new HashSet<String>(Arrays.asList(new String[]{"p1"}));
ImmutableMap.of("p1", toSet(toNodeId("n3"))));
- public void testNoMoreThanOneLabelExistedInOneHost() throws IOException {
+ void testNoMoreThanOneLabelExistedInOneHost() throws IOException {
boolean failed = false;
// As in YARN-2694, we temporarily disable the restriction that no more than
// one label can exist on one host
@@ -540,14 +558,14 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
failed = true;
- Assert.assertTrue("Should failed when set > 1 labels on a host", failed);
+ assertTrue(failed, "Should failed when set > 1 labels on a host");
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2")));
- Assert.assertTrue("Should failed when add > 1 labels on a host", failed);
+ assertTrue(failed, "Should failed when add > 1 labels on a host");
// add a same label to a node, #labels in this node is still 1, shouldn't
@@ -558,20 +576,21 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- Assert.assertTrue("Should failed when #labels > 1 on a host after add",
- failed);
+ assertTrue(failed,
+ "Should fail when #labels > 1 on a host after add");
private void verifyNodeLabelAdded(Set<String> expectedAddedLabelNames,
Collection<NodeLabel> addedNodeLabels) {
- Assert.assertEquals(expectedAddedLabelNames.size(), addedNodeLabels.size());
+ assertEquals(expectedAddedLabelNames.size(), addedNodeLabels.size());
for (NodeLabel label : addedNodeLabels) {
- Assert.assertTrue(expectedAddedLabelNames.contains(label.getName()));
+ assertTrue(expectedAddedLabelNames.contains(label.getName()));
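The recurring mechanical change in these hunks is the assertion message position: org.junit.Assert takes the failure message as its first argument, while org.junit.jupiter.api.Assertions takes it last. A minimal standalone sketch of the pattern, assuming junit-jupiter-api on the classpath (class name and values are illustrative, not from the patch):

    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class MessageOrderSketch {
        public static void main(String[] args) {
            boolean failed = true;
            // JUnit 4: Assert.assertTrue("Should fail when ...", failed);
            // JUnit 5: the condition comes first, the message last.
            assertTrue(failed, "Should fail when setting > 1 labels on a host");
        }
    }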
- public void testReplaceLabelsOnNodeInDistributedMode() throws Exception {
+ void testReplaceLabelsOnNodeInDistributedMode() throws Exception {
// create a new DummyCommonNodeLabelsManager distinct from the one created in @Before
@@ -587,16 +606,17 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
Set<String> labelsByNode = mgr.getLabelsByNode(toNodeId("n1"));
- Assert.assertNull(
- "Labels are not expected to be written to the NodeLabelStore",
- mgr.lastNodeToLabels);
- Assert.assertNotNull("Updated labels should be available from the Mgr",
- labelsByNode);
- Assert.assertTrue(labelsByNode.contains("p1"));
+ assertNull(
+ mgr.lastNodeToLabels,
+ "Labels are not expected to be written to the NodeLabelStore");
+ assertNotNull(labelsByNode,
+ "Updated labels should be available from the Mgr");
+ assertTrue(labelsByNode.contains("p1"));
- public void testLabelsInfoToNodes() throws IOException {
+ void testLabelsInfoToNodes() throws IOException {
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", false),
NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", true)));
@@ -605,8 +625,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
NodeLabel.newInstance("p1", false), toSet(toNodeId("n1"))));
- public void testGetNodeLabelsInfo() throws IOException {
+ void testGetNodeLabelsInfo() throws IOException {
NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", false)));
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p2")));
@@ -617,8 +638,9 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
toNodeId("n2"), toSet(NodeLabel.newInstance("p3", false))));
- public void testRemoveNodeLabelsInfo() throws IOException {
+ void testRemoveNodeLabelsInfo() throws IOException {
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", true)));
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p2", true)));
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p1")));
@@ -628,10 +650,10 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
- "p2", toSet(toNodeId("n1:1"), toNodeId("n1:0"))));
+ "p2", toSet(toNodeId("n1:1"), toNodeId("n1:0"))));
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), new HashSet()));
Map<String, Set<NodeId>> labelsToNodes2 = mgr.getLabelsToNodes();
- Assert.assertEquals(labelsToNodes2.get("p2"), null);
+ assertNull(labelsToNodes2.get("p2"));
@@ -24,23 +24,24 @@ import java.util.Arrays;
import org.apache.hadoop.yarn.event.InlineDispatcher;
public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
MockNodeLabelManager mgr = null;
Configuration conf = null;
@@ -57,26 +58,15 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
protected void startDispatcher() {
// do nothing
protected void stopDispatcher() {
- public TestFileSystemNodeLabelsStore(String className) {
- this.storeClassName = className;
- @Parameterized.Parameters
- public static Collection<String[]> getParameters() {
- return Arrays.asList(
- new String[][] { { FileSystemNodeLabelsStore.class.getCanonicalName() },
- { NonAppendableFSNodeLabelStore.class.getCanonicalName() } });
- public void before() throws IOException {
+ public void initTestFileSystemNodeLabelsStore(String className) throws IOException {
+ this.storeClassName = className;
mgr = new MockNodeLabelManager();
conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);
@@ -91,7 +81,13 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
+ public static Collection<String[]> getParameters() {
+ return Arrays.asList(
+ new String[][]{{FileSystemNodeLabelsStore.class.getCanonicalName()},
+ {NonAppendableFSNodeLabelStore.class.getCanonicalName()}});
public void after() throws IOException {
if (mgr.store instanceof FileSystemNodeLabelsStore) {
FileSystemNodeLabelsStore fsStore =
@@ -101,9 +97,12 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- public void testRecoverWithMirror() throws Exception {
+ @MethodSource("getParameters")
+ void testRecoverWithMirror(String className) throws Exception {
+ initTestFileSystemNodeLabelsStore(className);
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4"));
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6"));
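For context, the JUnit 4 @Parameterized runner (a constructor plus a static @Parameterized.Parameters factory) is replaced in the hunks above by @ParameterizedTest with @MethodSource and an explicit init method invoked at the top of each test. A minimal sketch of that shape, with hypothetical store names standing in for the real class names (assuming junit-jupiter-params on the classpath):

    import java.util.Arrays;
    import java.util.Collection;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;
    import static org.junit.jupiter.api.Assertions.assertNotNull;

    class ParameterizedStoreSketch {
        private String storeClassName;

        // Replaces the JUnit 4 constructor; each test invokes it first.
        void init(String className) {
            this.storeClassName = className;
        }

        // Replaces @Parameterized.Parameters; must be static.
        static Collection<String[]> getParameters() {
            return Arrays.asList(new String[][]{{"StoreA"}, {"StoreB"}});
        }

        @ParameterizedTest
        @MethodSource("getParameters")
        void testStore(String className) {
            init(className);
            assertNotNull(storeClassName);
        }
    }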
@@ -131,15 +130,15 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
// check variables
- Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size());
+ assertEquals(3, mgr.getClusterNodeLabelNames().size());
Arrays.asList("p2", "p4", "p6")));
assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"),
toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"),
toNodeId("n7"), toSet("p6")));
assertLabelsToNodesEquals(mgr.getLabelsToNodes(),
- ImmutableMap.of(
+ ImmutableMap.of(
"p6", toSet(toNodeId("n6"), toNodeId("n7")),
"p4", toSet(toNodeId("n4")),
"p2", toSet(toNodeId("n2"))));
@@ -151,24 +150,27 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- public void testRecoverWithDistributedNodeLabels() throws Exception {
+ void testRecoverWithDistributedNodeLabels(String className) throws Exception {
@@ -190,20 +192,23 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
+ assertEquals(3, mgr.getClusterNodeLabels().size());
- Assert.assertTrue("During recovery in distributed node-labels setup, "
- + "node to labels mapping should not be recovered ", mgr
- .getNodeLabels().size() == 0);
+ assertTrue(mgr.getNodeLabels().size() == 0,
+ "During recovery in distributed node-labels setup, "
+ + "node to labels mapping should not be recovered");
- public void testEditlogRecover() throws Exception {
+ void testEditlogRecover(String className) throws Exception {
@@ -231,24 +236,27 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- @Test (timeout = 10000)
- public void testSerilizationAfterRecovery() throws Exception {
+ void testSerilizationAfterRecovery(String className) throws Exception {
// Add to cluster node labels, p2/p6 are non-exclusive.
mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", true),
NodeLabel.newInstance("p2", false), NodeLabel.newInstance("p3", true),
@@ -289,8 +297,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
@@ -298,13 +306,13 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- "p6", toSet(toNodeId("n6"), toNodeId("n7")),
- "p4", toSet(toNodeId("n4")),
+ "p6", toSet(toNodeId("n6"), toNodeId("n7")),
+ "p4", toSet(toNodeId("n4")),
- Assert.assertFalse(mgr.isExclusiveNodeLabel("p2"));
- Assert.assertTrue(mgr.isExclusiveNodeLabel("p4"));
- Assert.assertFalse(mgr.isExclusiveNodeLabel("p6"));
+ assertFalse(mgr.isExclusiveNodeLabel("p2"));
+ assertTrue(mgr.isExclusiveNodeLabel("p4"));
+ assertFalse(mgr.isExclusiveNodeLabel("p6"));
* Add label p7,p8 then shutdown
@@ -314,7 +322,7 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p7", "p8"));
* Restart, add label p9 and shutdown
@@ -323,7 +331,7 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p9"));
* Recovery, and see if p9 added
@@ -332,14 +340,16 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
- Assert.assertEquals(6, mgr.getClusterNodeLabelNames().size());
+ assertEquals(6, mgr.getClusterNodeLabelNames().size());
Arrays.asList("p2", "p4", "p6", "p7", "p8", "p9")));
- public void testRootMkdirOnInitStore() throws Exception {
+ void testRootMkdirOnInitStore(String className) throws Exception {
final FileSystem mockFs = Mockito.mock(FileSystem.class);
FileSystemNodeLabelsStore mockStore = new FileSystemNodeLabelsStore() {
public void initFileSystem(Configuration config) throws IOException {
@@ -355,7 +365,7 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
private void verifyMkdirsCount(FileSystemNodeLabelsStore store,
- boolean existsRetVal, int expectedNumOfCalls)
+ boolean existsRetVal, int expectedNumOfCalls)
Mockito.when(store.getFs().exists(Mockito.any(
Path.class))).thenReturn(existsRetVal);
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
* Test class to verify node label util ops.
public class TestNodeLabelUtil {
- public void testAttributeValueAddition() {
+ void testAttributeValueAddition() {
String[] values =
new String[]{"1_8", "1.8", "ABZ", "ABZ", "az", "a-z", "a_z",
"123456789"};
@@ -55,7 +57,7 @@ public class TestNodeLabelUtil {
- public void testIsNodeAttributesEquals() {
+ void testIsNodeAttributesEquals() {
NodeAttribute nodeAttributeCK1V1 = NodeAttribute
.newInstance(NodeAttribute.PREFIX_CENTRALIZED, "K1",
NodeAttributeType.STRING, "V1");
@@ -77,45 +79,45 @@ public class TestNodeLabelUtil {
* equals if set size equals and items are all the same
- Assert.assertTrue(NodeLabelUtil.isNodeAttributesEquals(null, null));
- Assert.assertTrue(NodeLabelUtil
+ assertTrue(NodeLabelUtil.isNodeAttributesEquals(null, null));
+ assertTrue(NodeLabelUtil
.isNodeAttributesEquals(ImmutableSet.of(), ImmutableSet.of()));
.isNodeAttributesEquals(ImmutableSet.of(nodeAttributeCK1V1),
ImmutableSet.of(nodeAttributeCK1V1Copy)));
.isNodeAttributesEquals(ImmutableSet.of(nodeAttributeDK1V1),
ImmutableSet.of(nodeAttributeDK1V1Copy)));
- Assert.assertTrue(NodeLabelUtil.isNodeAttributesEquals(
+ assertTrue(NodeLabelUtil.isNodeAttributesEquals(
ImmutableSet.of(nodeAttributeCK1V1, nodeAttributeDK1V1),
ImmutableSet.of(nodeAttributeCK1V1Copy, nodeAttributeDK1V1Copy)));
* not equals if set size not equals or items are different
NodeLabelUtil.isNodeAttributesEquals(null, ImmutableSet.of()));
NodeLabelUtil.isNodeAttributesEquals(ImmutableSet.of(), null));
// different attribute prefix
- Assert.assertFalse(NodeLabelUtil
+ assertFalse(NodeLabelUtil
ImmutableSet.of(nodeAttributeDK1V1)));
// different attribute name
ImmutableSet.of(nodeAttributeDK2V1)));
// different attribute value
.isNodeAttributesEquals(ImmutableSet.of(nodeAttributeDK2V1),
ImmutableSet.of(nodeAttributeDK2V2)));
// different set
ImmutableSet.of()));
ImmutableSet.of(nodeAttributeCK1V1, nodeAttributeDK1V1)));
- Assert.assertFalse(NodeLabelUtil.isNodeAttributesEquals(
+ assertFalse(NodeLabelUtil.isNodeAttributesEquals(
@@ -16,17 +16,18 @@
package org.apache.hadoop.yarn.resourcetypes;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
* Contains helper methods to create Resource and ResourceInformation objects.
* ResourceInformation can be created from a resource name
@@ -16,6 +16,14 @@
package org.apache.hadoop.yarn.security;
+import java.io.BufferedWriter;
@@ -25,16 +33,9 @@ import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.util.DockerClientConfigHandler;
-import java.io.BufferedWriter;
* Test the functionality of the DockerClientConfigHandler.
@@ -51,7 +52,7 @@ public class TestDockerClientConfigHandler {
private File file;
private Configuration conf = new Configuration();
file = File.createTempFile("docker-client-config", "test");
file.deleteOnExit();
@@ -61,7 +62,7 @@ public class TestDockerClientConfigHandler {
- public void testReadCredentialsFromConfigFile() throws Exception {
+ void testReadCredentialsFromConfigFile() throws Exception {
Credentials credentials =
DockerClientConfigHandler.readCredentialsFromConfigFile(
new Path(file.toURI()), conf, APPLICATION_ID);
@@ -85,7 +86,7 @@ public class TestDockerClientConfigHandler {
- public void testGetCredentialsFromTokensByteBuffer() throws Exception {
+ void testGetCredentialsFromTokensByteBuffer() throws Exception {
@@ -110,7 +111,7 @@ public class TestDockerClientConfigHandler {
- public void testWriteDockerCredentialsToPath() throws Exception {
+ void testWriteDockerCredentialsToPath() throws Exception {
File outFile = File.createTempFile("docker-client-config", "out");
outFile.deleteOnExit();
@@ -21,6 +21,8 @@ import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.DataInputBuffer;
@@ -43,31 +45,33 @@ import org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ContainerType;
public class TestYARNTokenIdentifier {
- public void testNMTokenIdentifier() throws IOException {
+ void testNMTokenIdentifier() throws IOException {
testNMTokenIdentifier(false);
- public void testNMTokenIdentifierOldFormat() throws IOException {
+ void testNMTokenIdentifierOldFormat() throws IOException {
testNMTokenIdentifier(true);
public void testNMTokenIdentifier(boolean oldFormat) throws IOException {
- ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
- ApplicationId.newInstance(1, 1), 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(ApplicationId.newInstance(1, 1), 1);
NodeId nodeId = NodeId.newInstance("host0", 0);
String applicationSubmitter = "usr0";
int masterKeyId = 1;
- NMTokenIdentifier token = new NMTokenIdentifier(
- appAttemptId, nodeId, applicationSubmitter, masterKeyId);
+ NMTokenIdentifier token =
+ new NMTokenIdentifier(appAttemptId, nodeId, applicationSubmitter, masterKeyId);
NMTokenIdentifier anotherToken = new NMTokenIdentifier();
byte[] tokenContent;
@@ -79,36 +83,32 @@ public class TestYARNTokenIdentifier {
DataInputBuffer dib = new DataInputBuffer();
dib.reset(tokenContent, tokenContent.length);
anotherToken.readFields(dib);
- Assert.assertEquals("Token is not the same after serialization " +
- "and deserialization.", token, anotherToken);
+ assertEquals(token, anotherToken,
+ "Token is not the same after serialization " + "and deserialization.");
// verify all properties are the same as original
- "appAttemptId from proto is not the same with original token",
- anotherToken.getApplicationAttemptId(), appAttemptId);
- "NodeId from proto is not the same with original token",
- anotherToken.getNodeId(), nodeId);
- "applicationSubmitter from proto is not the same with original token",
- anotherToken.getApplicationSubmitter(), applicationSubmitter);
- "masterKeyId from proto is not the same with original token",
- anotherToken.getKeyId(), masterKeyId);
+ assertEquals(anotherToken.getApplicationAttemptId(), appAttemptId,
+ "appAttemptId from proto is not the same with original token");
+ assertEquals(anotherToken.getNodeId(), nodeId,
+ "NodeId from proto is not the same with original token");
+ assertEquals(anotherToken.getApplicationSubmitter(), applicationSubmitter,
+ "applicationSubmitter from proto is not the same with original token");
+ assertEquals(anotherToken.getKeyId(), masterKeyId,
+ "masterKeyId from proto is not the same with original token");
- public void testAMRMTokenIdentifier() throws IOException {
+ void testAMRMTokenIdentifier() throws IOException {
testAMRMTokenIdentifier(false);
- public void testAMRMTokenIdentifierOldFormat() throws IOException {
+ void testAMRMTokenIdentifierOldFormat() throws IOException {
testAMRMTokenIdentifier(true);
@@ -130,55 +130,55 @@ public class TestYARNTokenIdentifier {
- Assert.assertEquals("ApplicationAttemptId from proto is not the same with original token",
- Assert.assertEquals("masterKeyId from proto is not the same with original token",
+ "ApplicationAttemptId from proto is not the same with original token");
- public void testClientToAMTokenIdentifier() throws IOException {
+ void testClientToAMTokenIdentifier() throws IOException {
ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(1, 1), 1);
String clientName = "user";
ClientToAMTokenIdentifier token = new ClientToAMTokenIdentifier(
appAttemptId, clientName);
ClientToAMTokenIdentifier anotherToken = new ClientToAMTokenIdentifier();
byte[] tokenContent = token.getBytes();
- anotherToken.getApplicationAttemptID(), appAttemptId);
- Assert.assertEquals("clientName from proto is not the same with original token",
- anotherToken.getClientName(), clientName);
+ assertEquals(anotherToken.getApplicationAttemptID(), appAttemptId,
+ "ApplicationAttemptId from proto is not the same with original token");
+ assertEquals(anotherToken.getClientName(), clientName,
+ "clientName from proto is not the same with original token");
- public void testContainerTokenIdentifierProtoMissingFields()
+ void testContainerTokenIdentifierProtoMissingFields()
ContainerTokenIdentifierProto.Builder builder =
ContainerTokenIdentifierProto.newBuilder();
ContainerTokenIdentifierProto proto = builder.build();
- Assert.assertFalse(proto.hasContainerType());
- Assert.assertFalse(proto.hasExecutionType());
- Assert.assertFalse(proto.hasNodeLabelExpression());
+ assertFalse(proto.hasContainerType());
+ assertFalse(proto.hasExecutionType());
+ assertFalse(proto.hasNodeLabelExpression());
byte[] tokenData = proto.toByteArray();
@@ -186,21 +186,19 @@ public class TestYARNTokenIdentifier {
ContainerTokenIdentifier tid = new ContainerTokenIdentifier();
tid.readFields(dib);
- Assert.assertEquals("container type",
- ContainerType.TASK, tid.getContainerType());
- Assert.assertEquals("execution type",
- ExecutionType.GUARANTEED, tid.getExecutionType());
- Assert.assertEquals("node label expression",
- CommonNodeLabelsManager.NO_LABEL, tid.getNodeLabelExpression());
+ assertEquals(ContainerType.TASK, tid.getContainerType(), "container type");
+ assertEquals(ExecutionType.GUARANTEED, tid.getExecutionType(), "execution type");
+ assertEquals(CommonNodeLabelsManager.NO_LABEL, tid.getNodeLabelExpression(),
+ "node label expression");
- public void testContainerTokenIdentifier() throws IOException {
+ void testContainerTokenIdentifier() throws IOException {
testContainerTokenIdentifier(false, false);
- public void testContainerTokenIdentifierOldFormat() throws IOException {
+ void testContainerTokenIdentifierOldFormat() throws IOException {
testContainerTokenIdentifier(true, true);
testContainerTokenIdentifier(true, false);
@@ -236,64 +234,55 @@ public class TestYARNTokenIdentifier {
- "ContainerID from proto is not the same with original token",
- anotherToken.getContainerID(), containerID);
- "Hostname from proto is not the same with original token",
- anotherToken.getNmHostAddress(), hostName);
- "ApplicationSubmitter from proto is not the same with original token",
- anotherToken.getApplicationSubmitter(), appSubmitter);
- "Resource from proto is not the same with original token",
- anotherToken.getResource(), r);
- "expiryTimeStamp from proto is not the same with original token",
- anotherToken.getExpiryTimeStamp(), expiryTimeStamp);
- "KeyId from proto is not the same with original token",
- anotherToken.getMasterKeyId(), masterKeyId);
- "RMIdentifier from proto is not the same with original token",
- anotherToken.getRMIdentifier(), rmIdentifier);
- "Priority from proto is not the same with original token",
- anotherToken.getPriority(), priority);
- "CreationTime from proto is not the same with original token",
- anotherToken.getCreationTime(), creationTime);
+ assertEquals(anotherToken.getContainerID(), containerID,
+ "ContainerID from proto is not the same with original token");
+ assertEquals(anotherToken.getNmHostAddress(), hostName,
+ "Hostname from proto is not the same with original token");
+ assertEquals(anotherToken.getApplicationSubmitter(), appSubmitter,
+ "ApplicationSubmitter from proto is not the same with original token");
+ assertEquals(anotherToken.getResource(), r,
+ "Resource from proto is not the same with original token");
+ assertEquals(anotherToken.getExpiryTimeStamp(), expiryTimeStamp,
+ "expiryTimeStamp from proto is not the same with original token");
+ assertEquals(anotherToken.getMasterKeyId(), masterKeyId,
+ "KeyId from proto is not the same with original token");
+ assertEquals(anotherToken.getRMIdentifier(), rmIdentifier,
+ "RMIdentifier from proto is not the same with original token");
+ assertEquals(anotherToken.getPriority(), priority,
+ "Priority from proto is not the same with original token");
+ assertEquals(anotherToken.getCreationTime(), creationTime,
+ "CreationTime from proto is not the same with original token");
- Assert.assertNull(anotherToken.getLogAggregationContext());
+ assertNull(anotherToken.getLogAggregationContext());
- Assert.assertEquals(CommonNodeLabelsManager.NO_LABEL,
+ assertEquals(CommonNodeLabelsManager.NO_LABEL,
anotherToken.getNodeLabelExpression());
- Assert.assertEquals(ContainerType.TASK,
+ assertEquals(ContainerType.TASK,
anotherToken.getContainerType());
- Assert.assertEquals(ExecutionType.GUARANTEED,
+ assertEquals(ExecutionType.GUARANTEED,
anotherToken.getExecutionType());
- public void testRMDelegationTokenIdentifier() throws IOException {
+ void testRMDelegationTokenIdentifier() throws IOException {
testRMDelegationTokenIdentifier(false);
- public void testRMDelegationTokenIdentifierOldFormat() throws IOException {
+ void testRMDelegationTokenIdentifierOldFormat() throws IOException {
testRMDelegationTokenIdentifier(true);
@@ -333,30 +322,22 @@ public class TestYARNTokenIdentifier {
dib.close();
- "Token is not the same after serialization and deserialization.",
- originalToken, anotherToken);
- "owner from proto is not the same with original token",
- owner, anotherToken.getOwner());
- "renewer from proto is not the same with original token",
- renewer, anotherToken.getRenewer());
- "realUser from proto is not the same with original token",
- realUser, anotherToken.getRealUser());
- "issueDate from proto is not the same with original token",
- issueDate, anotherToken.getIssueDate());
- "maxDate from proto is not the same with original token",
- maxDate, anotherToken.getMaxDate());
- "sequenceNumber from proto is not the same with original token",
- sequenceNumber, anotherToken.getSequenceNumber());
- masterKeyId, anotherToken.getMasterKeyId());
+ assertEquals(originalToken, anotherToken,
+ "Token is not the same after serialization and deserialization.");
+ assertEquals(owner, anotherToken.getOwner(),
+ "owner from proto is not the same with original token");
+ assertEquals(renewer, anotherToken.getRenewer(),
+ "renewer from proto is not the same with original token");
+ assertEquals(realUser, anotherToken.getRealUser(),
+ "realUser from proto is not the same with original token");
+ assertEquals(issueDate, anotherToken.getIssueDate(),
+ "issueDate from proto is not the same with original token");
+ assertEquals(maxDate, anotherToken.getMaxDate(),
+ "maxDate from proto is not the same with original token");
+ assertEquals(sequenceNumber, anotherToken.getSequenceNumber(),
+ "sequenceNumber from proto is not the same with original token");
+ assertEquals(masterKeyId, anotherToken.getMasterKeyId(),
+ "masterKeyId from proto is not the same with original token");
// Test getProto
YARNDelegationTokenIdentifierProto tokenProto = originalToken.getProto();
@@ -372,15 +353,15 @@ public class TestYARNTokenIdentifier {
readToken.readFields(db);
// Verify if read token equals with original token
- Assert.assertEquals("Token from getProto is not the same after " +
- "serialization and deserialization.", originalToken, readToken);
+ assertEquals(originalToken, readToken, "Token from getProto is not the same after " +
+ "serialization and deserialization.");
db.close();
out.close();
- public void testTimelineDelegationTokenIdentifier() throws IOException {
+ void testTimelineDelegationTokenIdentifier() throws IOException {
Text owner = new Text("user1");
Text renewer = new Text("user2");
Text realUser = new Text("user3");
@@ -388,50 +369,50 @@ public class TestYARNTokenIdentifier {
long maxDate = 2;
int sequenceNumber = 3;
int masterKeyId = 4;
- TimelineDelegationTokenIdentifier token =
+ TimelineDelegationTokenIdentifier token =
new TimelineDelegationTokenIdentifier(owner, renewer, realUser);
token.setIssueDate(issueDate);
token.setMaxDate(maxDate);
token.setSequenceNumber(sequenceNumber);
token.setMasterKeyId(masterKeyId);
- TimelineDelegationTokenIdentifier anotherToken =
+ TimelineDelegationTokenIdentifier anotherToken =
new TimelineDelegationTokenIdentifier();
- Assert.assertEquals("owner from proto is not the same with original token",
- anotherToken.getOwner(), owner);
- Assert.assertEquals("renewer from proto is not the same with original token",
- anotherToken.getRenewer(), renewer);
- Assert.assertEquals("realUser from proto is not the same with original token",
- anotherToken.getRealUser(), realUser);
- Assert.assertEquals("issueDate from proto is not the same with original token",
- anotherToken.getIssueDate(), issueDate);
- Assert.assertEquals("maxDate from proto is not the same with original token",
- anotherToken.getMaxDate(), maxDate);
- Assert.assertEquals("sequenceNumber from proto is not the same with original token",
- anotherToken.getSequenceNumber(), sequenceNumber);
+ assertEquals(anotherToken.getOwner(), owner,
+ "owner from proto is not the same with original token");
+ assertEquals(anotherToken.getRenewer(), renewer,
+ "renewer from proto is not the same with original token");
+ assertEquals(anotherToken.getRealUser(), realUser,
+ "realUser from proto is not the same with original token");
+ assertEquals(anotherToken.getIssueDate(), issueDate,
+ "issueDate from proto is not the same with original token");
+ assertEquals(anotherToken.getMaxDate(), maxDate,
+ "maxDate from proto is not the same with original token");
+ assertEquals(anotherToken.getSequenceNumber(), sequenceNumber,
+ "sequenceNumber from proto is not the same with original token");
- public void testParseTimelineDelegationTokenIdentifierRenewer() throws IOException {
+ void testParseTimelineDelegationTokenIdentifierRenewer() throws IOException {
// Server side when generating a timeline DT
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL,
@@ -442,11 +423,11 @@ public class TestYARNTokenIdentifier {
Text realUser = new Text("realUser");
TimelineDelegationTokenIdentifier token =
- Assert.assertEquals(new Text("yarn"), token.getRenewer());
+ assertEquals(new Text("yarn"), token.getRenewer());
- public void testAMContainerTokenIdentifier() throws IOException {
+ void testAMContainerTokenIdentifier() throws IOException {
ContainerId containerID = ContainerId.newContainerId(
ApplicationAttemptId.newInstance(ApplicationId.newInstance(
1, 1), 1), 1);
@@ -471,10 +452,10 @@ public class TestYARNTokenIdentifier {
- Assert.assertEquals(ContainerType.APPLICATION_MASTER,
+ assertEquals(ContainerType.APPLICATION_MASTER,
token =
@@ -490,10 +471,10 @@ public class TestYARNTokenIdentifier {
- Assert.assertEquals(ExecutionType.OPPORTUNISTIC,
+ assertEquals(ExecutionType.OPPORTUNISTIC,
@@ -17,18 +17,19 @@
package org.apache.hadoop.yarn.server.security;
public class TestApplicationACLsManager {
@@ -39,14 +40,14 @@ public class TestApplicationACLsManager {
private static final String TESTUSER3 = "testuser3";
- public void testCheckAccess() {
+ void testCheckAccess() {
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE,
true);
conf.set(YarnConfiguration.YARN_ADMIN_ACL,
ADMIN_USER);
ApplicationACLsManager aclManager = new ApplicationACLsManager(conf);
- Map<ApplicationAccessType, String> aclMap =
+ Map<ApplicationAccessType, String> aclMap =
new HashMap<ApplicationAccessType, String>();
aclMap.put(ApplicationAccessType.VIEW_APP, TESTUSER1 + "," + TESTUSER3);
aclMap.put(ApplicationAccessType.MODIFY_APP, TESTUSER1);
@@ -56,46 +57,46 @@ public class TestApplicationACLsManager {
//User in ACL, should be allowed access
UserGroupInformation testUser1 = UserGroupInformation
.createRemoteUser(TESTUSER1);
- assertTrue(aclManager.checkAccess(testUser1, ApplicationAccessType.VIEW_APP,
+ assertTrue(aclManager.checkAccess(testUser1, ApplicationAccessType.VIEW_APP,
APP_OWNER, appId));
- assertTrue(aclManager.checkAccess(testUser1, ApplicationAccessType.MODIFY_APP,
+ assertTrue(aclManager.checkAccess(testUser1, ApplicationAccessType.MODIFY_APP,
//User NOT in ACL, should not be allowed access
UserGroupInformation testUser2 = UserGroupInformation
.createRemoteUser(TESTUSER2);
- assertFalse(aclManager.checkAccess(testUser2, ApplicationAccessType.VIEW_APP,
+ assertFalse(aclManager.checkAccess(testUser2, ApplicationAccessType.VIEW_APP,
- assertFalse(aclManager.checkAccess(testUser2, ApplicationAccessType.MODIFY_APP,
+ assertFalse(aclManager.checkAccess(testUser2, ApplicationAccessType.MODIFY_APP,
//User has View access, but not modify access
UserGroupInformation testUser3 = UserGroupInformation
.createRemoteUser(TESTUSER3);
- assertTrue(aclManager.checkAccess(testUser3, ApplicationAccessType.VIEW_APP,
+ assertTrue(aclManager.checkAccess(testUser3, ApplicationAccessType.VIEW_APP,
- assertFalse(aclManager.checkAccess(testUser3, ApplicationAccessType.MODIFY_APP,
+ assertFalse(aclManager.checkAccess(testUser3, ApplicationAccessType.MODIFY_APP,
//Application Owner should have all access
UserGroupInformation appOwner = UserGroupInformation
.createRemoteUser(APP_OWNER);
- assertTrue(aclManager.checkAccess(appOwner, ApplicationAccessType.VIEW_APP,
+ assertTrue(aclManager.checkAccess(appOwner, ApplicationAccessType.VIEW_APP,
- assertTrue(aclManager.checkAccess(appOwner, ApplicationAccessType.MODIFY_APP,
+ assertTrue(aclManager.checkAccess(appOwner, ApplicationAccessType.MODIFY_APP,
//Admin should have all access
UserGroupInformation adminUser = UserGroupInformation
.createRemoteUser(ADMIN_USER);
- assertTrue(aclManager.checkAccess(adminUser, ApplicationAccessType.VIEW_APP,
+ assertTrue(aclManager.checkAccess(adminUser, ApplicationAccessType.VIEW_APP,
- assertTrue(aclManager.checkAccess(adminUser, ApplicationAccessType.MODIFY_APP,
+ assertTrue(aclManager.checkAccess(adminUser, ApplicationAccessType.MODIFY_APP,
- public void testCheckAccessWithNullACLS() {
+ void testCheckAccessWithNullACLS() {
@@ -108,30 +109,30 @@ public class TestApplicationACLsManager {
//Application ACL is not added
//Application Owner should have all access even if Application ACL is not added
// A regular user should Not have access
- assertFalse(aclManager.checkAccess(testUser1, ApplicationAccessType.VIEW_APP,
+ assertFalse(aclManager.checkAccess(testUser1, ApplicationAccessType.VIEW_APP,
- assertFalse(aclManager.checkAccess(testUser1, ApplicationAccessType.MODIFY_APP,
+ assertFalse(aclManager.checkAccess(testUser1, ApplicationAccessType.MODIFY_APP,
- public void testCheckAccessWithPartialACLS() {
+ void testCheckAccessWithPartialACLS() {
@@ -141,40 +142,40 @@ public class TestApplicationACLsManager {
// Add only the VIEW ACLS
- aclMap.put(ApplicationAccessType.VIEW_APP, TESTUSER1 );
+ aclMap.put(ApplicationAccessType.VIEW_APP, TESTUSER1);
ApplicationId appId = ApplicationId.newInstance(1, 1);
aclManager.addApplication(appId, aclMap);
// testuser1 should have view access only
// A testuser2 should Not have access
@@ -18,23 +18,24 @@
package org.apache.hadoop.yarn.util;
+import java.util.Enumeration;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Appender;
import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Priority;
import org.apache.log4j.LogManager;
-import java.util.Enumeration;
+import org.apache.log4j.Priority;
import static org.apache.hadoop.util.GenericsUtil.isLog4jLogger;
public class TestAdHocLogDumper {
@@ -42,14 +43,14 @@ public class TestAdHocLogDumper {
LoggerFactory.getLogger(TestAdHocLogDumper.class);
- public void testDumpingSchedulerLogs() throws Exception {
+ void testDumpingSchedulerLogs() throws Exception {
Map<Appender, Priority> levels = new HashMap<>();
String logFilename = "test.log";
Logger logger = LoggerFactory.getLogger(TestAdHocLogDumper.class);
if (isLog4jLogger(this.getClass())) {
- for (Enumeration appenders = LogManager.getRootLogger().
- getAllAppenders(); appenders.hasMoreElements();) {
+ for (Enumeration appenders =
+ LogManager.getRootLogger().getAllAppenders(); appenders.hasMoreElements();) {
Object obj = appenders.nextElement();
if (obj instanceof AppenderSkeleton) {
AppenderSkeleton appender = (AppenderSkeleton) obj;
@@ -64,11 +65,11 @@ public class TestAdHocLogDumper {
LOG.debug("test message 1");
LOG.info("test message 2");
File logFile = new File(logFilename);
- Assert.assertTrue(logFile.exists());
+ assertTrue(logFile.exists());
long lastWrite = logFile.lastModified();
- Assert.assertTrue(lastWrite < Time.now());
- Assert.assertTrue(logFile.length() != 0);
+ assertTrue(lastWrite < Time.now());
+ assertTrue(logFile.length() != 0);
// make sure levels are set back to their original values
@@ -77,12 +78,12 @@ public class TestAdHocLogDumper {
- Assert.assertEquals(levels.get(appender), appender.getThreshold());
+ assertEquals(levels.get(appender), appender.getThreshold());
boolean del = logFile.delete();
- if(!del) {
+ if (!del) {
LOG.info("Couldn't clean up after test");
@@ -17,22 +17,23 @@
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Shell;
public class TestApps {
- public void testSetEnvFromInputString() {
+ void testSetEnvFromInputString() {
environment.put("JAVA_HOME", "/path/jdk");
String goodEnv = "a1=1,b_2=2,_c=3,d=4,e=,f_win=%JAVA_HOME%"
@@ -64,7 +65,7 @@ public class TestApps {
- public void testSetEnvFromInputProperty() {
+ void testSetEnvFromInputProperty() {
Configuration conf = new Configuration(false);
Map<String, String> env = new HashMap<>();
String propName = "mapreduce.map.env";
@@ -91,7 +92,7 @@ public class TestApps {
- public void testSetEnvFromInputPropertyDefault() {
+ void testSetEnvFromInputPropertyDefault() {
@@ -122,7 +123,7 @@ public class TestApps {
- public void testSetEnvFromInputPropertyOverrideDefault() {
+ void testSetEnvFromInputPropertyOverrideDefault() {
@@ -152,7 +153,7 @@ public class TestApps {
- public void testSetEnvFromInputPropertyCommas() {
+ void testSetEnvFromInputPropertyCommas() {
String propName = "mapreduce.reduce.env";
@@ -176,7 +177,7 @@ public class TestApps {
- public void testSetEnvFromInputPropertyNull() {
+ void testSetEnvFromInputPropertyNull() {
@@ -18,48 +18,49 @@
-import org.junit.rules.ExpectedException;
* Test class for {@link BoundedAppender}.
public class TestBoundedAppender {
- public ExpectedException expected = ExpectedException.none();
- public void initWithZeroLimitThrowsException() {
- expected.expect(IllegalArgumentException.class);
- expected.expectMessage("limit should be positive");
+ void initWithZeroLimitThrowsException() {
+ Throwable exception = assertThrows(IllegalArgumentException.class, () -> {
- new BoundedAppender(0);
+ new BoundedAppender(0);
+ });
+ assertTrue(exception.getMessage().contains("limit should be positive"));
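The ExpectedException rule has no JUnit 5 counterpart; the replacement, as above, is assertThrows, which returns the thrown exception so the message can be checked explicitly. A standalone sketch of the same pattern, assuming junit-jupiter-api on the classpath:

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class AssertThrowsSketch {
        public static void main(String[] args) {
            // assertThrows fails unless the lambda throws the given type,
            // and hands the caught exception back for further checks.
            Throwable exception = assertThrows(IllegalArgumentException.class, () -> {
                throw new IllegalArgumentException("limit should be positive");
            });
            assertTrue(exception.getMessage().contains("limit should be positive"));
        }
    }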
- public void nullAppendedNullStringRead() {
+ void nullAppendedNullStringRead() {
final BoundedAppender boundedAppender = new BoundedAppender(4);
boundedAppender.append(null);
- assertEquals("null appended, \"null\" read", "null",
- boundedAppender.toString());
+ assertEquals("null",
+ boundedAppender.toString(),
+ "null appended, \"null\" read");
- public void appendBelowLimitOnceValueIsReadCorrectly() {
+ void appendBelowLimitOnceValueIsReadCorrectly() {
final BoundedAppender boundedAppender = new BoundedAppender(2);
boundedAppender.append("ab");
- assertEquals("value appended is read correctly", "ab",
+ assertEquals("ab",
+ "value appended is read correctly");
- public void appendValuesBelowLimitAreReadCorrectlyInFifoOrder() {
+ void appendValuesBelowLimitAreReadCorrectlyInFifoOrder() {
final BoundedAppender boundedAppender = new BoundedAppender(3);
@@ -67,13 +68,13 @@ public class TestBoundedAppender {
boundedAppender.append("e");
boundedAppender.append("fg");
- assertEquals("last values appended fitting limit are read correctly",
- String.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 7, "efg"),
+ assertEquals(String.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 7, "efg"),
+ "last values appended fitting limit are read correctly");
- public void appendLastAboveLimitPreservesLastMessagePostfix() {
+ void appendLastAboveLimitPreservesLastMessagePostfix() {
@@ -81,35 +82,37 @@ public class TestBoundedAppender {
boundedAppender.append("fghij");
- "last value appended above limit postfix is read correctly", String
+ String
.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 10, "hij"),
+ "last value appended above limit postfix is read correctly");
- public void appendMiddleAboveLimitPreservesLastMessageAndMiddlePostfix() {
+ void appendMiddleAboveLimitPreservesLastMessageAndMiddlePostfix() {
boundedAppender.append("cde");
- assertEquals("last value appended above limit postfix is read correctly",
- String.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 5, "cde"),
+ assertEquals(String.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 5, "cde"),
- "middle value appended above limit postfix and last value are "
- + "read correctly",
String.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 7, "efg"),
+ "middle value appended above limit postfix and last value are "
+ + "read correctly");
boundedAppender.append("hijkl");
.format(BoundedAppender.TRUNCATED_MESSAGES_TEMPLATE, 3, 12, "jkl"),
@@ -17,23 +17,25 @@
-import static org.assertj.core.api.Assertions.assertThat;
import java.net.URISyntaxException;
import org.apache.hadoop.yarn.api.TestContainerId;
-import org.apache.hadoop.yarn.api.records.URL;
+import org.apache.hadoop.yarn.api.records.URL;
+import static org.assertj.core.api.Assertions.assertThat;
public class TestConverterUtils {
- public void testConvertUrlWithNoPort() throws URISyntaxException {
+ void testConvertUrlWithNoPort() throws URISyntaxException {
Path expectedPath = new Path("hdfs://foo.com");
URL url = URL.fromPath(expectedPath);
Path actualPath = url.toPath();
@@ -41,15 +43,15 @@ public class TestConverterUtils {
- public void testConvertUrlWithUserinfo() throws URISyntaxException {
+ void testConvertUrlWithUserinfo() throws URISyntaxException {
Path expectedPath = new Path("foo://username:password@example.com:8042");
assertEquals(expectedPath, actualPath);
- public void testContainerId() throws URISyntaxException {
+ void testContainerId() throws URISyntaxException {
ContainerId id = TestContainerId.newContainerId(0, 0, 0, 0);
String cid = id.toString();
assertEquals("container_0_0000_00_000000", cid);
@@ -58,7 +60,7 @@ public class TestConverterUtils {
- public void testContainerIdWithEpoch() throws URISyntaxException {
+ void testContainerIdWithEpoch() throws URISyntaxException {
ContainerId id = TestContainerId.newContainerId(0, 0, 0, 25645811);
assertEquals("container_0_0000_00_25645811", cid);
@@ -85,38 +87,44 @@ public class TestConverterUtils {
- public void testContainerIdNull() throws URISyntaxException {
- assertNull(ConverterUtils.toString((ContainerId)null));
+ void testContainerIdNull() throws URISyntaxException {
+ assertNull(ConverterUtils.toString((ContainerId) null));
- public void testNodeIdWithDefaultPort() throws URISyntaxException {
+ void testNodeIdWithDefaultPort() throws URISyntaxException {
NodeId nid;
nid = ConverterUtils.toNodeIdWithDefaultPort("node:10");
assertThat(nid.getPort()).isEqualTo(10);
assertThat(nid.getHost()).isEqualTo("node");
nid = ConverterUtils.toNodeIdWithDefaultPort("node");
assertThat(nid.getPort()).isEqualTo(0);
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidContainerId() {
- ContainerId.fromString("container_e20_1423221031460_0003_01");
+ void testInvalidContainerId() {
+ assertThrows(IllegalArgumentException.class, () -> {
+ ContainerId.fromString("container_e20_1423221031460_0003_01");
+ });
- public void testInvalidAppattemptId() {
- ConverterUtils.toApplicationAttemptId("appattempt_1423221031460");
+ void testInvalidAppattemptId() {
+ ConverterUtils.toApplicationAttemptId("appattempt_1423221031460");
- ConverterUtils.toApplicationId("application_1423221031460");
+ ConverterUtils.toApplicationId("application_1423221031460");
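Similarly, JUnit 5's @Test annotation has no expected attribute, so @Test(expected = IllegalArgumentException.class) becomes an assertThrows call in the test body. A minimal sketch, using a standard-library trigger in place of the YARN parsers above:

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import org.junit.jupiter.api.Test;

    class ExpectedAttributeSketch {
        // JUnit 4: @Test(expected = NumberFormatException.class)
        // JUnit 5: the expectation moves into the method body.
        @Test
        void rejectsMalformedInput() {
            assertThrows(NumberFormatException.class,
                () -> Integer.parseInt("not-a-number"));
        }
    }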
@@ -18,13 +18,6 @@
-import static org.apache.hadoop.fs.CreateFlag.CREATE;
-import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
-import static org.junit.Assume.assumeTrue;
import java.io.FileOutputStream;
@@ -52,14 +45,17 @@ import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
-import org.apache.hadoop.util.concurrent.HadoopExecutors;
+import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
+import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;
+import org.junit.jupiter.api.AfterAll;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -70,17 +66,21 @@ import org.apache.hadoop.fs.LocalDirAllocator;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
-import org.junit.AfterClass;
-import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
-import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
-import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;
+import static org.apache.hadoop.fs.CreateFlag.CREATE;
+import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
* Unit test for the FSDownload class.
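The static import swap above moves from org.junit.Assume to org.junit.jupiter.api.Assumptions; the semantics carry over in that a failed assumption aborts the test (reported as skipped) rather than failing it. A minimal sketch:

    import static org.junit.jupiter.api.Assumptions.assumeTrue;

    public class AssumptionSketch {
        public static void main(String[] args) {
            // Throws TestAbortedException (reported as skipped) if false.
            assumeTrue(System.getProperty("os.name") != null);
            System.out.println("assumption held; the test body would run");
        }
    }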
@@ -96,7 +96,7 @@ public class TestFSDownload {
};
- @AfterClass
+ @AfterAll
public static void deleteTestDir() throws IOException {
FileContext fs = FileContext.getLocalFSFileContext();
fs.delete(new Path("target", TestFSDownload.class.getSimpleName()), true);
@@ -270,16 +270,17 @@ public class TestFSDownload {
return ret;
- @Test (timeout=10000)
- public void testDownloadBadPublic() throws IOException, URISyntaxException,
+ void testDownloadBadPublic() throws IOException, URISyntaxException,
InterruptedException {
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
FileContext files = FileContext.getLocalFSFileContext(conf);
final Path basedir = files.makeQualified(new Path("target",
- TestFSDownload.class.getSimpleName()));
+ TestFSDownload.class.getSimpleName()));
files.mkdir(basedir, null, true);
conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
Map<LocalResource, LocalResourceVisibility> rsrcVis =
new HashMap<LocalResource, LocalResourceVisibility>();
@@ -288,11 +289,11 @@ public class TestFSDownload {
rand.setSeed(sharedSeed);
System.out.println("SEED: " + sharedSeed);
- Map<LocalResource,Future<Path>> pending =
- new HashMap<LocalResource,Future<Path>>();
+ Map<LocalResource, Future<Path>> pending =
+ new HashMap<LocalResource, Future<Path>>();
ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
LocalDirAllocator dirs =
- new LocalDirAllocator(TestFSDownload.class.getName());
+ new LocalDirAllocator(TestFSDownload.class.getName());
int size = 512;
LocalResourceVisibility vis = LocalResourceVisibility.PUBLIC;
Path path = new Path(basedir, "test-file");
@@ -300,32 +301,33 @@ public class TestFSDownload {
rsrcVis.put(rsrc, vis);
Path destPath = dirs.getLocalPathForWrite(
basedir.toString(), size, conf);
- destPath = new Path (destPath,
- Long.toString(uniqueNumberGenerator.incrementAndGet()));
+ destPath = new Path(destPath,
+ Long.toString(uniqueNumberGenerator.incrementAndGet()));
FSDownload fsd =
- new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
- destPath, rsrc);
+ new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
+ destPath, rsrc);
pending.put(rsrc, exec.submit(fsd));
exec.shutdown();
- while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS));
- Assert.assertTrue(pending.get(rsrc).isDone());
+ while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS)) ;
+ assertTrue(pending.get(rsrc).isDone());
- for (Map.Entry<LocalResource,Future<Path>> p : pending.entrySet()) {
+ for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
p.getValue().get();
- Assert.fail("We localized a file that is not public.");
+ fail("We localized a file that is not public.");
} catch (ExecutionException e) {
- Assert.assertTrue(e.getCause() instanceof IOException);
+ assertTrue(e.getCause() instanceof IOException);
- @Test (timeout=60000)
- public void testDownloadPublicWithStatCache() throws IOException,
+ void testDownloadPublicWithStatCache() throws IOException,
URISyntaxException, InterruptedException, ExecutionException {
Path basedir = files.makeQualified(new Path("target",
// if test directory doesn't have ancestor permission, skip this test
FileSystem f = basedir.getFileSystem(conf);
@@ -336,28 +338,28 @@ public class TestFSDownload {
- final ConcurrentMap<Path,AtomicInteger> counts =
- new ConcurrentHashMap<Path,AtomicInteger>();
- final CacheLoader<Path,Future<FileStatus>> loader =
+ final ConcurrentMap<Path, AtomicInteger> counts =
+ new ConcurrentHashMap<Path, AtomicInteger>();
+ final CacheLoader<Path, Future<FileStatus>> loader =
FSDownload.createStatusCacheLoader(conf);
- final LoadingCache<Path,Future<FileStatus>> statCache =
- CacheBuilder.newBuilder().build(new CacheLoader<Path,Future<FileStatus>>() {
- public Future<FileStatus> load(Path path) throws Exception {
- // increment the count
- AtomicInteger count = counts.get(path);
- if (count == null) {
- count = new AtomicInteger(0);
- AtomicInteger existing = counts.putIfAbsent(path, count);
- if (existing != null) {
- count = existing;
- count.incrementAndGet();
+ final LoadingCache<Path, Future<FileStatus>> statCache =
+ CacheBuilder.newBuilder().build(new CacheLoader<Path, Future<FileStatus>>() {
+ public Future<FileStatus> load(Path path) throws Exception {
+ // increment the count
+ AtomicInteger count = counts.get(path);
+ if (count == null) {
+ count = new AtomicInteger(0);
+ AtomicInteger existing = counts.putIfAbsent(path, count);
+ if (existing != null) {
+ count = existing;
+ count.incrementAndGet();
- // use the default loader
- return loader.load(path);
- });
+ // use the default loader
+ return loader.load(path);
// test FSDownload.isPublic() concurrently
final int fileCount = 3;
@@ -382,11 +384,11 @@ public class TestFSDownload {
List<Future<Boolean>> futures = exec.invokeAll(tasks);
// files should be public
- for (Future<Boolean> future: futures) {
+ for (Future<Boolean> future : futures) {
assertTrue(future.get());
// for each path exactly one file status call should be made
- for (AtomicInteger count: counts.values()) {
+ for (AtomicInteger count : counts.values()) {
assertSame(count.get(), 1);
@@ -394,16 +396,17 @@ public class TestFSDownload {
- public void testDownload() throws IOException, URISyntaxException,
+ void testDownload() throws IOException, URISyntaxException,
@@ -412,16 +415,16 @@ public class TestFSDownload {
int[] sizes = new int[10];
for (int i = 0; i < 10; ++i) {
sizes[i] = rand.nextInt(512) + 512;
LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
- if (i%2 == 1) {
+ if (i % 2 == 1) {
vis = LocalResourceVisibility.APPLICATION;
Path p = new Path(basedir, "" + i);
@@ -429,7 +432,7 @@ public class TestFSDownload {
basedir.toString(), sizes[i], conf);
Long.toString(uniqueNumberGenerator.incrementAndGet()));
new FSDownload(files, UserGroupInformation.getCurrentUser(), conf,
@@ -438,29 +441,29 @@ public class TestFSDownload {
- for (Future<Path> path: pending.values()) {
- Assert.assertTrue(path.isDone());
+ for (Future<Path> path : pending.values()) {
+ assertTrue(path.isDone());
Path localized = p.getValue().get();
assertEquals(sizes[Integer.parseInt(localized.getName())], p.getKey()
.getSize());
FileStatus status = files.getFileStatus(localized.getParent());
FsPermission perm = status.getPermission();
- assertEquals("Cache directory permissions are incorrect",
- new FsPermission((short)0755), perm);
+ assertEquals(new FsPermission((short) 0755), perm,
+ "Cache directory permissions are incorrect");
status = files.getFileStatus(localized);
perm = status.getPermission();
- System.out.println("File permission " + perm +
+ System.out.println("File permission " + perm +
" for rsrc vis " + p.getKey().getVisibility().name());
assert(rsrcVis.containsKey(p.getKey()));
- Assert.assertTrue("Private file should be 500",
- perm.toShort() == FSDownload.PRIVATE_FILE_PERMS.toShort());
+ assertTrue(perm.toShort() == FSDownload.PRIVATE_FILE_PERMS.toShort(),
+ "Private file should be 500");
throw new IOException("Failed exec", e);
@@ -524,15 +527,14 @@ public class TestFSDownload {
FileStatus[] childFiles = files.getDefaultFileSystem().listStatus(
filestatus.getPath());
for (FileStatus childfile : childFiles) {
- if(strFileName.endsWith(".ZIP") &&
- childfile.getPath().getName().equals(strFileName) &&
- !childfile.isDirectory()) {
- Assert.fail("Failure...After unzip, there should have been a" +
- " directory formed with zip file name but found a file. "
- + childfile.getPath());
+ if (strFileName.endsWith(".ZIP") && childfile.getPath().getName().equals(strFileName)
+ && !childfile.isDirectory()) {
+ fail("Failure...After unzip, there should have been a"
+ + " directory formed with zip file name but found a file. "
+ + childfile.getPath());
if (childfile.getPath().getName().startsWith("tmp")) {
- Assert.fail("Tmp File should not have been there "
+ fail("Tmp File should not have been there "
+ childfile.getPath());
@@ -543,20 +545,23 @@ public class TestFSDownload {
- public void testDownloadArchive() throws IOException, URISyntaxException,
+ void testDownloadArchive() throws IOException, URISyntaxException,
downloadWithFileType(TEST_FILE_TYPE.TAR);
- public void testDownloadPatternJar() throws IOException, URISyntaxException,
+ void testDownloadPatternJar() throws IOException, URISyntaxException,
downloadWithFileType(TEST_FILE_TYPE.JAR);
- public void testDownloadArchiveZip() throws IOException, URISyntaxException,
+ void testDownloadArchiveZip() throws IOException, URISyntaxException,
downloadWithFileType(TEST_FILE_TYPE.ZIP);
@@ -564,8 +569,9 @@ public class TestFSDownload {
* To test fix for YARN-3029
- public void testDownloadArchiveZipWithTurkishLocale() throws IOException,
+ void testDownloadArchiveZipWithTurkishLocale() throws IOException,
URISyntaxException, InterruptedException {
Locale defaultLocale = Locale.getDefault();
// Set to Turkish
@@ -576,8 +582,9 @@ public class TestFSDownload {
Locale.setDefault(defaultLocale);
- public void testDownloadArchiveTgz() throws IOException, URISyntaxException,
+ void testDownloadArchiveTgz() throws IOException, URISyntaxException,
downloadWithFileType(TEST_FILE_TYPE.TGZ);
@@ -588,11 +595,11 @@ public class TestFSDownload {
FileStatus status = files.getFileStatus(p);
if (status.isDirectory()) {
if (vis == LocalResourceVisibility.PUBLIC) {
- Assert.assertTrue(status.getPermission().toShort() ==
+ assertTrue(status.getPermission().toShort() ==
FSDownload.PUBLIC_DIR_PERMS.toShort());
else {
FSDownload.PRIVATE_DIR_PERMS.toShort());
if (!status.isSymlink()) {
@@ -604,40 +611,41 @@ public class TestFSDownload {
FSDownload.PUBLIC_FILE_PERMS.toShort());
FSDownload.PRIVATE_FILE_PERMS.toShort());
- public void testDirDownload() throws IOException, InterruptedException {
+ void testDirDownload() throws IOException, InterruptedException {
Random rand = new Random();
long sharedSeed = rand.nextLong();
for (int i = 0; i < 5; ++i) {
@@ -646,7 +654,7 @@ public class TestFSDownload {
basedir.toString(), conf);
@@ -655,21 +663,21 @@ public class TestFSDownload {
FileStatus status = files.getFileStatus(localized);
System.out.println("Testing path " + localized);
assert(status.isDirectory());
verifyPermsRecursively(localized.getFileSystem(conf),
files, localized, rsrcVis.get(p.getKey()));
@@ -678,8 +686,9 @@ public class TestFSDownload {
- public void testUniqueDestinationPath() throws Exception {
+ void testUniqueDestinationPath() throws Exception {
TestFSDownload.class.getSimpleName()));
@@ -704,12 +713,12 @@ public class TestFSDownload {
destPath, rsrc);
Future<Path> rPath = singleThreadedExec.submit(fsd);
singleThreadedExec.shutdown();
- while (!singleThreadedExec.awaitTermination(1000, TimeUnit.MILLISECONDS));
- Assert.assertTrue(rPath.isDone());
+ while (!singleThreadedExec.awaitTermination(1000, TimeUnit.MILLISECONDS)) ;
+ assertTrue(rPath.isDone());
// Now FSDownload will not create a random directory to localize the
// resource. Therefore the final localizedPath for the resource should be
// destination directory (passed as an argument) + file name.
- Assert.assertEquals(destPath, rPath.get().getParent());
+ assertEquals(destPath, rPath.get().getParent());
@@ -717,8 +726,9 @@ public class TestFSDownload {
* from modification to the local resource's timestamp on the source FS just
* before the download of this local resource has started.
- public void testResourceTimestampChangeDuringDownload()
+ void testResourceTimestampChangeDuringDownload()
throws IOException, InterruptedException {
@@ -759,7 +769,7 @@ public class TestFSDownload {
FileSystem sourceFs = sourceFsPath.getFileSystem(conf);
sourceFs.setTimes(sourceFsPath, modifiedFSTimestamp, modifiedFSTimestamp);
} catch (URISyntaxException use) {
- Assert.fail("No exception expected.");
+ fail("No exception expected.");
// Execute the FSDownload operation.
@@ -770,19 +780,19 @@ public class TestFSDownload {
exec.awaitTermination(1000, TimeUnit.MILLISECONDS);
- Assert.assertTrue(pending.get(localResource).isDone());
+ assertTrue(pending.get(localResource).isDone());
for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
- Assert.fail("Exception expected from timestamp update during download");
+ fail("Exception expected from timestamp update during download");
} catch (ExecutionException ee) {
- Assert.assertTrue(ee.getCause() instanceof IOException);
- Assert.assertTrue("Exception contains original timestamp",
- ee.getMessage().contains(Times.formatISO8601(origLRTimestamp)));
- Assert.assertTrue("Exception contains modified timestamp",
- ee.getMessage().contains(Times.formatISO8601(modifiedFSTimestamp)));
+ assertTrue(ee.getCause() instanceof IOException);
+ assertTrue(ee.getMessage().contains(Times.formatISO8601(origLRTimestamp)),
+ "Exception contains original timestamp");
+ assertTrue(ee.getMessage().contains(Times.formatISO8601(modifiedFSTimestamp)),
+ "Exception contains modified timestamp");
@@ -19,9 +19,12 @@ package org.apache.hadoop.yarn.util;
* Test class to validate the correctness of the {@code LRUCacheHashMap}.
@@ -34,7 +37,7 @@ public class TestLRUCacheHashMap {
* expected.
- public void testLRUCache()
+ void testLRUCache()
throws YarnException, IOException, InterruptedException {
int mapSize = 5;
@@ -48,11 +51,11 @@ public class TestLRUCacheHashMap {
map.put("4", 4);
map.put("5", 5);
- Assert.assertEquals(mapSize, map.size());
+ assertEquals(mapSize, map.size());
// Check if all the elements in the map are from 1 to 5
for (int i = 1; i < mapSize; i++) {
- Assert.assertTrue(map.containsKey(Integer.toString(i)));
+ assertTrue(map.containsKey(Integer.toString(i)));
map.put("6", 6);
@@ -60,14 +63,14 @@ public class TestLRUCacheHashMap {
map.put("7", 7);
map.put("8", 8);
// Check if all the elements in the map are from 5 to 8 and the 3
for (int i = 5; i < mapSize; i++) {
- Assert.assertTrue(map.containsKey("3"));
+ assertTrue(map.containsKey("3"));
@@ -18,20 +18,22 @@
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Level;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.Time;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
public class TestLog4jWarningErrorMetricsAppender {
@@ -81,186 +83,179 @@ public class TestLog4jWarningErrorMetricsAppender {
- public void testPurge() throws Exception {
+ void testPurge() throws Exception {
setupAppender(2, 1, 1);
logMessages(Level.ERROR, "test message 1", 1);
cutoff.clear();
cutoff.add(0L);
- Assert.assertEquals(1, appender.getErrorCounts(cutoff).size());
- Assert.assertEquals(1, appender.getErrorCounts(cutoff).get(0).longValue());
- Assert.assertEquals(1, appender.getErrorMessagesAndCounts(cutoff).get(0)
- .size());
+ assertEquals(1, appender.getErrorCounts(cutoff).size());
+ assertEquals(1, appender.getErrorCounts(cutoff).get(0).longValue());
+ assertEquals(1, appender.getErrorMessagesAndCounts(cutoff).get(0)
+ .size());
Thread.sleep(3000);
- Assert.assertEquals(0, appender.getErrorCounts(cutoff).get(0).longValue());
- Assert.assertEquals(0, appender.getErrorMessagesAndCounts(cutoff).get(0)
+ assertEquals(0, appender.getErrorCounts(cutoff).get(0).longValue());
+ assertEquals(0, appender.getErrorMessagesAndCounts(cutoff).get(0)
setupAppender(2, 1000, 2);
logMessages(Level.ERROR, "test message 1", 3);
logMessages(Level.ERROR, "test message 2", 2);
- Assert.assertEquals(5, appender.getErrorCounts(cutoff).get(0).longValue());
- Assert.assertEquals(2, appender.getErrorMessagesAndCounts(cutoff).get(0)
+ assertEquals(5, appender.getErrorCounts(cutoff).get(0).longValue());
+ assertEquals(2, appender.getErrorMessagesAndCounts(cutoff).get(0)
logMessages(Level.ERROR, "test message 3", 3);
- Assert.assertEquals(8, appender.getErrorCounts(cutoff).get(0).longValue());
+ assertEquals(8, appender.getErrorCounts(cutoff).get(0).longValue());
- public void testErrorCounts() throws Exception {
+ void testErrorCounts() throws Exception {
setupAppender(100, 100, 100);
logMessages(Level.ERROR, "test message 1", 2);
logMessages(Level.ERROR, "test message 2", 3);
- Assert.assertEquals(1, appender.getWarningCounts(cutoff).size());
- Assert
- .assertEquals(0, appender.getWarningCounts(cutoff).get(0).longValue());
+ assertEquals(1, appender.getWarningCounts(cutoff).size());
+ assertEquals(0, appender.getWarningCounts(cutoff).get(0).longValue());
cutoff.add(Time.now() / 1000);
logMessages(Level.ERROR, "test message 3", 2);
- Assert.assertEquals(2, appender.getErrorCounts(cutoff).size());
- Assert.assertEquals(2, appender.getWarningCounts(cutoff).size());
- Assert.assertEquals(7, appender.getErrorCounts(cutoff).get(0).longValue());
- Assert.assertEquals(2, appender.getErrorCounts(cutoff).get(1).longValue());
- .assertEquals(0, appender.getWarningCounts(cutoff).get(1).longValue());
+ assertEquals(2, appender.getErrorCounts(cutoff).size());
+ assertEquals(2, appender.getWarningCounts(cutoff).size());
+ assertEquals(7, appender.getErrorCounts(cutoff).get(0).longValue());
+ assertEquals(2, appender.getErrorCounts(cutoff).get(1).longValue());
+ assertEquals(0, appender.getWarningCounts(cutoff).get(1).longValue());
- public void testWarningCounts() throws Exception {
+ void testWarningCounts() throws Exception {
logMessages(Level.WARN, "test message 1", 2);
logMessages(Level.WARN, "test message 2", 3);
- .assertEquals(5, appender.getWarningCounts(cutoff).get(0).longValue());
+ assertEquals(5, appender.getWarningCounts(cutoff).get(0).longValue());
logMessages(Level.WARN, "test message 3", 2);
- Assert.assertEquals(0, appender.getErrorCounts(cutoff).get(1).longValue());
- .assertEquals(7, appender.getWarningCounts(cutoff).get(0).longValue());
- .assertEquals(2, appender.getWarningCounts(cutoff).get(1).longValue());
+ assertEquals(0, appender.getErrorCounts(cutoff).get(1).longValue());
+ assertEquals(7, appender.getWarningCounts(cutoff).get(0).longValue());
+ assertEquals(2, appender.getWarningCounts(cutoff).get(1).longValue());
- public void testWarningMessages() throws Exception {
+ void testWarningMessages() throws Exception {
- Assert.assertEquals(1, appender.getErrorMessagesAndCounts(cutoff).size());
- Assert.assertEquals(1, appender.getWarningMessagesAndCounts(cutoff).size());
+ assertEquals(1, appender.getErrorMessagesAndCounts(cutoff).size());
+ assertEquals(1, appender.getWarningMessagesAndCounts(cutoff).size());
Map<String, Log4jWarningErrorMetricsAppender.Element> errorsMap =
appender.getErrorMessagesAndCounts(cutoff).get(0);
Map<String, Log4jWarningErrorMetricsAppender.Element> warningsMap =
appender.getWarningMessagesAndCounts(cutoff).get(0);
- Assert.assertEquals(0, errorsMap.size());
- Assert.assertEquals(2, warningsMap.size());
- Assert.assertTrue(warningsMap.containsKey("test message 1"));
- Assert.assertTrue(warningsMap.containsKey("test message 2"));
+ assertEquals(0, errorsMap.size());
+ assertEquals(2, warningsMap.size());
+ assertTrue(warningsMap.containsKey("test message 1"));
+ assertTrue(warningsMap.containsKey("test message 2"));
Log4jWarningErrorMetricsAppender.Element msg1Info = warningsMap.get("test message 1");
Log4jWarningErrorMetricsAppender.Element msg2Info = warningsMap.get("test message 2");
- Assert.assertEquals(2, msg1Info.count.intValue());
- Assert.assertEquals(3, msg2Info.count.intValue());
+ assertEquals(2, msg1Info.count.intValue());
+ assertEquals(3, msg2Info.count.intValue());
- Assert.assertEquals(2, appender.getErrorMessagesAndCounts(cutoff).size());
- Assert.assertEquals(2, appender.getWarningMessagesAndCounts(cutoff).size());
+ assertEquals(2, appender.getErrorMessagesAndCounts(cutoff).size());
+ assertEquals(2, appender.getWarningMessagesAndCounts(cutoff).size());
errorsMap = appender.getErrorMessagesAndCounts(cutoff).get(0);
warningsMap = appender.getWarningMessagesAndCounts(cutoff).get(0);
- Assert.assertEquals(3, warningsMap.size());
- Assert.assertTrue(warningsMap.containsKey("test message 3"));
+ assertEquals(3, warningsMap.size());
+ assertTrue(warningsMap.containsKey("test message 3"));
errorsMap = appender.getErrorMessagesAndCounts(cutoff).get(1);
warningsMap = appender.getWarningMessagesAndCounts(cutoff).get(1);
- Assert.assertEquals(1, warningsMap.size());
+ assertEquals(1, warningsMap.size());
Log4jWarningErrorMetricsAppender.Element msg3Info = warningsMap.get("test message 3");
- Assert.assertEquals(2, msg3Info.count.intValue());
+ assertEquals(2, msg3Info.count.intValue());
- public void testErrorMessages() throws Exception {
+ void testErrorMessages() throws Exception {
- Assert.assertEquals(2, errorsMap.size());
- Assert.assertEquals(0, warningsMap.size());
- Assert.assertTrue(errorsMap.containsKey("test message 1"));
- Assert.assertTrue(errorsMap.containsKey("test message 2"));
+ assertEquals(2, errorsMap.size());
+ assertEquals(0, warningsMap.size());
+ assertTrue(errorsMap.containsKey("test message 1"));
+ assertTrue(errorsMap.containsKey("test message 2"));
Log4jWarningErrorMetricsAppender.Element msg1Info = errorsMap.get("test message 1");
Log4jWarningErrorMetricsAppender.Element msg2Info = errorsMap.get("test message 2");
- Assert.assertEquals(3, errorsMap.size());
- Assert.assertTrue(errorsMap.containsKey("test message 3"));
+ assertEquals(3, errorsMap.size());
+ assertTrue(errorsMap.containsKey("test message 3"));
- Assert.assertEquals(1, errorsMap.size());
+ assertEquals(1, errorsMap.size());
Log4jWarningErrorMetricsAppender.Element msg3Info = errorsMap.get("test message 3");
- public void testInfoDebugTrace() {
+ void testInfoDebugTrace() {
logMessages(Level.INFO, "test message 1", 2);
logMessages(Level.DEBUG, "test message 2", 2);
logMessages(Level.TRACE, "test message 3", 2);
- Assert.assertEquals(0, appender.getWarningMessagesAndCounts(cutoff).get(0)
+ assertEquals(0, appender.getWarningMessagesAndCounts(cutoff).get(0)
@@ -18,11 +18,6 @@
-import static org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.KB_TO_BYTES;
-import static org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree.UNAVAILABLE;
import java.io.BufferedWriter;
@@ -37,9 +32,13 @@ import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FileUtils;
@@ -52,19 +51,23 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.MemInfo;
import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.ProcessSmapMemoryInfo;
import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.ProcessTreeSmapMemInfo;
+import static org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.KB_TO_BYTES;
+import static org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree.UNAVAILABLE;
* A JUnit test to test ProcfsBasedProcessTree.
public class TestProcfsBasedProcessTree {
- private static final Logger LOG = LoggerFactory
- .getLogger(TestProcfsBasedProcessTree.class);
- protected static File TEST_ROOT_DIR = new File("target",
- TestProcfsBasedProcessTree.class.getName() + "-localDir");
+ private static final Logger LOG = LoggerFactory.getLogger(TestProcfsBasedProcessTree.class);
+ protected static File TEST_ROOT_DIR =
+ new File("target", TestProcfsBasedProcessTree.class.getName() + "-localDir");
private ShellCommandExecutor shexec = null;
private String pidFile, lowestDescendant, lostDescendant;
@@ -111,21 +114,22 @@ public class TestProcfsBasedProcessTree {
return getPidFromPidFile(pidFile);
assumeTrue(Shell.LINUX);
FileContext.getLocalFSFileContext().delete(
new Path(TEST_ROOT_DIR.getAbsolutePath()), true);
- @Test(timeout = 30000)
- public void testProcessTree() throws Exception {
+ @Test
+ @Timeout(30000)
+ void testProcessTree() throws Exception {
- Assert.assertTrue(ProcfsBasedProcessTree.isAvailable());
+ assertTrue(ProcfsBasedProcessTree.isAvailable());
- Assert.assertTrue("ProcfsBaseProcessTree should be available on Linux",
+ assertTrue(false,
+ "ProcfsBaseProcessTree should be available on Linux");
// create shell script
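JUnit 5's `@Test` has no `timeout` attribute, so the hunk above swaps `@Test(timeout = 30000)` for the separate `org.junit.jupiter.api.Timeout` annotation. One caution: `@Timeout` defaults to seconds, so a bare `@Timeout(30000)` means 30000 seconds, not 30 seconds; a faithful port of a millisecond timeout needs an explicit unit. A minimal sketch (the test body is illustrative, not from this patch):

```java
import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutExample {
  // JUnit 4 equivalent: @Test(timeout = 30000), i.e. 30000 milliseconds.
  @Test
  @Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
  void finishesWellWithinTheLimit() throws InterruptedException {
    Thread.sleep(10); // far below the 30-second ceiling
  }
}
```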
@@ -156,7 +160,7 @@ public class TestProcfsBasedProcessTree {
+ "(sleep 300&\n"
+ "echo $! > " + lostDescendant + ")\n"
+ " while true\n do\n" + " sleep 5\n" + " done\n" + "fi",
- StandardCharsets.UTF_8);
+ StandardCharsets.UTF_8);
Thread t = new RogueTaskThread();
t.start();
@@ -182,8 +186,8 @@ public class TestProcfsBasedProcessTree {
// Verify the orphaned pid is In process tree
String lostpid = getPidFromPidFile(lostDescendant);
LOG.info("Orphaned pid: " + lostpid);
- Assert.assertTrue("Child process owned by init escaped process tree.",
- p.contains(lostpid));
+ assertTrue(p.contains(lostpid),
+ "Child process owned by init escaped process tree.");
// Get the process-tree dump
String processTreeDump = p.getProcessTreeDump();
@@ -208,18 +212,18 @@ public class TestProcfsBasedProcessTree {
LOG.info("Process-tree dump follows: \n" + processTreeDump);
- Assert.assertTrue("Process-tree dump doesn't start with a proper header",
- processTreeDump.startsWith("\t|- PID PPID PGRPID SESSID CMD_NAME "
- + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
- + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"));
+ assertTrue(processTreeDump.startsWith("\t|- PID PPID PGRPID SESSID CMD_NAME "
+ + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
+ + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"),
+ "Process-tree dump doesn't start with a proper header");
for (int i = N; i >= 0; i--) {
String cmdLineDump =
"\\|- [0-9]+ [0-9]+ [0-9]+ [0-9]+ \\(sh\\)"
+ " [0-9]+ [0-9]+ [0-9]+ [0-9]+ sh " + shellScript + " " + i;
Pattern pat = Pattern.compile(cmdLineDump);
Matcher mat = pat.matcher(processTreeDump);
- Assert.assertTrue("Process-tree dump doesn't contain the cmdLineDump of "
- + i + "th process!", mat.find());
+ assertTrue(mat.find(), "Process-tree dump doesn't contain the cmdLineDump of "
+ + i + "th process!");
// Not able to join thread sometimes when forking with large N.
@@ -232,12 +236,12 @@ public class TestProcfsBasedProcessTree {
// ProcessTree is gone now. Any further calls should be sane.
p.updateProcessTree();
- Assert.assertFalse("ProcessTree must have been gone", isAlive(pid));
- "vmem for the gone-process is " + p.getVirtualMemorySize()
- + " . It should be UNAVAILABLE(-1).",
- p.getVirtualMemorySize() == UNAVAILABLE);
- Assert.assertEquals("[ ]", p.toString());
+ assertFalse(isAlive(pid), "ProcessTree must have been gone");
+ assertTrue(p.getVirtualMemorySize() == UNAVAILABLE,
+ "vmem for the gone-process is " + p.getVirtualMemorySize()
+ + " . It should be UNAVAILABLE(-1).");
+ assertEquals("[ ]", p.toString());
protected ProcfsBasedProcessTree createProcessTree(String pid) {
@@ -395,11 +399,12 @@ public class TestProcfsBasedProcessTree {
* if there was a problem setting up the fake procfs directories or
* files.
- public void testCpuAndMemoryForProcessTree() throws IOException {
+ void testCpuAndMemoryForProcessTree() throws IOException {
// test processes
- String[] pids = { "100", "200", "300", "400" };
+ String[] pids = {"100", "200", "300", "400"};
ControlledClock testClock = new ControlledClock();
testClock.setTime(0);
// create the fake procfs root directory.
@@ -442,34 +447,35 @@ public class TestProcfsBasedProcessTree {
processTree.updateProcessTree();
// verify virtual memory
- Assert.assertEquals("Virtual memory does not match", 600000L,
- processTree.getVirtualMemorySize());
+ assertEquals(600000L, processTree.getVirtualMemorySize(), "Virtual memory does not match");
// verify rss memory
long cumuRssMem =
ProcfsBasedProcessTree.PAGE_SIZE > 0
- ? 600L * ProcfsBasedProcessTree.PAGE_SIZE :
- ResourceCalculatorProcessTree.UNAVAILABLE;
- Assert.assertEquals("rss memory does not match", cumuRssMem,
- processTree.getRssMemorySize());
+ ? 600L * ProcfsBasedProcessTree.PAGE_SIZE :
+ ResourceCalculatorProcessTree.UNAVAILABLE;
+ assertEquals(cumuRssMem,
+ processTree.getRssMemorySize(),
+ "rss memory does not match");
// verify cumulative cpu time
long cumuCpuTime =
ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS > 0
? 7200L * ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS : 0L;
- Assert.assertEquals("Cumulative cpu time does not match", cumuCpuTime,
- processTree.getCumulativeCpuTime());
+ assertEquals(cumuCpuTime,
+ processTree.getCumulativeCpuTime(),
+ "Cumulative cpu time does not match");
// verify CPU usage
- Assert.assertEquals("Percent CPU time should be set to -1 initially",
- -1.0, processTree.getCpuUsagePercent(),
- 0.01);
+ assertEquals(-1.0, processTree.getCpuUsagePercent(),
+ 0.01,
+ "Percent CPU time should be set to -1 initially");
// Check by enabling smaps
setSmapsInProceTree(processTree, true);
// anon (exclude r-xs,r--s)
- Assert.assertEquals("rss memory does not match",
- (20 * KB_TO_BYTES * 3), processTree.getRssMemorySize());
+ assertEquals((20 * KB_TO_BYTES * 3), processTree.getRssMemorySize(),
+ "rss memory does not match");
// test the cpu time again to see if it cumulates
procInfos[0] =
@@ -490,8 +496,9 @@ public class TestProcfsBasedProcessTree {
cumuCpuTime =
? 9400L * ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS : 0L;
double expectedCpuUsagePercent =
(ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS > 0) ?
@@ -500,11 +507,12 @@ public class TestProcfsBasedProcessTree {
// expectedCpuUsagePercent is given by (94000L - 72000) * 100/
// 200000;
// which in this case is 11. Lets verify that first
- Assert.assertEquals(11, expectedCpuUsagePercent, 0.001);
- Assert.assertEquals("Percent CPU time is not correct expected " +
- expectedCpuUsagePercent, expectedCpuUsagePercent,
+ assertEquals(11, expectedCpuUsagePercent, 0.001);
+ assertEquals(expectedCpuUsagePercent,
processTree.getCpuUsagePercent(),
+ "Percent CPU time is not correct expected " +
+ expectedCpuUsagePercent);
FileUtil.fullyDelete(procfsRootDir);
@@ -529,8 +537,9 @@ public class TestProcfsBasedProcessTree {
- public void testMemForOlderProcesses() throws IOException {
+ void testMemForOlderProcesses() throws IOException {
testMemForOlderProcesses(false);
testMemForOlderProcesses(true);
@@ -576,8 +585,7 @@ public class TestProcfsBasedProcessTree {
setSmapsInProceTree(processTree, smapEnabled);
- Assert.assertEquals("Virtual memory does not match", 700000L,
+ assertEquals(700000L, processTree.getVirtualMemorySize(), "Virtual memory does not match");
// write one more process as child of 100.
String[] newPids = { "500" };
@@ -594,36 +602,31 @@ public class TestProcfsBasedProcessTree {
// check memory includes the new process.
- Assert.assertEquals("vmem does not include new process",
- 1200000L, processTree.getVirtualMemorySize());
+ assertEquals(1200000L, processTree.getVirtualMemorySize(),
+ "vmem does not include new process");
if (!smapEnabled) {
- long cumuRssMem =
- ProcfsBasedProcessTree.PAGE_SIZE > 0
- ? 1200L * ProcfsBasedProcessTree.PAGE_SIZE :
- Assert.assertEquals("rssmem does not include new process",
- cumuRssMem, processTree.getRssMemorySize());
+ long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
+ 1200L * ProcfsBasedProcessTree.PAGE_SIZE :
+ ResourceCalculatorProcessTree.UNAVAILABLE;
+ assertEquals(cumuRssMem, processTree.getRssMemorySize(),
+ "rssmem does not include new process");
- 20 * KB_TO_BYTES * 4, processTree.getRssMemorySize());
+ assertEquals(20 * KB_TO_BYTES * 4, processTree.getRssMemorySize(),
// however processes older than 1 iteration will retain the older value
- "vmem shouldn't have included new process", 700000L,
- processTree.getVirtualMemorySize(1));
+ assertEquals(700000L, processTree.getVirtualMemorySize(1),
+ "vmem shouldn't have included new process");
- ? 700L * ProcfsBasedProcessTree.PAGE_SIZE :
- "rssmem shouldn't have included new process", cumuRssMem,
- processTree.getRssMemorySize(1));
+ 700L * ProcfsBasedProcessTree.PAGE_SIZE :
+ ResourceCalculatorProcessTree.UNAVAILABLE;
+ assertEquals(cumuRssMem, processTree.getRssMemorySize(1),
+ "rssmem shouldn't have included new process");
- "rssmem shouldn't have included new process",
- 20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(1));
+ assertEquals(20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(1),
// one more process
@@ -643,49 +646,41 @@ public class TestProcfsBasedProcessTree {
// processes older than 2 iterations should be same as before.
- "vmem shouldn't have included new processes", 700000L,
- processTree.getVirtualMemorySize(2));
+ assertEquals(700000L, processTree.getVirtualMemorySize(2),
+ "vmem shouldn't have included new processes");
? 700L * ProcfsBasedProcessTree.PAGE_SIZE :
ResourceCalculatorProcessTree.UNAVAILABLE;
- "rssmem shouldn't have included new processes",
- cumuRssMem, processTree.getRssMemorySize(2));
+ assertEquals(cumuRssMem, processTree.getRssMemorySize(2),
+ "rssmem shouldn't have included new processes");
- 20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(2));
+ assertEquals(20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(2),
// processes older than 1 iteration should not include new process,
// but include process 500
- "vmem shouldn't have included new processes", 1200000L,
+ assertEquals(1200000L, processTree.getVirtualMemorySize(1),
+ "vmem shouldn't have included new processes");
? 1200L * ProcfsBasedProcessTree.PAGE_SIZE :
- cumuRssMem, processTree.getRssMemorySize(1));
- 20 * KB_TO_BYTES * 4, processTree.getRssMemorySize(1));
+ assertEquals(20 * KB_TO_BYTES * 4, processTree.getRssMemorySize(1),
// no processes older than 3 iterations
- "Getting non-zero vmem for processes older than 3 iterations",
- 0, processTree.getVirtualMemorySize(3));
- "Getting non-zero rssmem for processes older than 3 iterations",
- 0, processTree.getRssMemorySize(3));
+ assertEquals(0, processTree.getVirtualMemorySize(3),
+ "Getting non-zero vmem for processes older than 3 iterations");
+ assertEquals(0, processTree.getRssMemorySize(3),
+ "Getting non-zero rssmem for processes older than 3 iterations");
@@ -700,8 +695,9 @@ public class TestProcfsBasedProcessTree {
- public void testDestroyProcessTree() throws IOException {
+ void testDestroyProcessTree() throws IOException {
// test process
String pid = "100";
@@ -715,8 +711,8 @@ public class TestProcfsBasedProcessTree {
SystemClock.getInstance());
// Let us not create stat file for pid 100.
- Assert.assertTrue(ProcfsBasedProcessTree.checkPidPgrpidForMatch(pid,
- procfsRootDir.getAbsolutePath()));
+ assertTrue(ProcfsBasedProcessTree.checkPidPgrpidForMatch(pid,
+ procfsRootDir.getAbsolutePath()));
@@ -727,10 +723,11 @@ public class TestProcfsBasedProcessTree {
* @throws IOException
- public void testProcessTreeDump() throws IOException {
+ void testProcessTreeDump() throws IOException {
- String[] pids = { "100", "200", "300", "400", "500", "600" };
+ String[] pids = {"100", "200", "300", "400", "500", "600"};
File procfsRootDir = new File(TEST_ROOT_DIR, "proc");
@@ -790,29 +787,29 @@ public class TestProcfsBasedProcessTree {
String processTreeDump = processTree.getProcessTreeDump();
ProcessStatInfo p = procInfos[i];
- "Process-tree dump doesn't contain the cmdLineDump of process "
- + p.pid,
- processTreeDump.contains("\t|- " + p.pid + " " + p.ppid + " "
- + p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime
- + " " + p.stime + " " + p.vmem + " " + p.rssmemPage + " "
- + cmdLines[i]));
+ processTreeDump.contains("\t|- " + p.pid + " " + p.ppid + " "
+ + p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime
+ + " " + p.stime + " " + p.vmem + " " + p.rssmemPage + " "
+ + cmdLines[i]),
+ "Process-tree dump doesn't contain the cmdLineDump of process "
+ + p.pid);
// 600 should not be in the dump
ProcessStatInfo p = procInfos[5];
- "Process-tree dump shouldn't contain the cmdLineDump of process "
- + p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime + " "
- + p.stime + " " + p.vmem + " " + cmdLines[5]));
+ + p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime + " "
+ + p.stime + " " + p.vmem + " " + cmdLines[5]),
+ "Process-tree dump shouldn't contain the cmdLineDump of process "
@@ -887,11 +884,11 @@ public class TestProcfsBasedProcessTree {
public static void setupProcfsRootDir(File procfsRootDir) throws IOException {
// cleanup any existing process root dir.
if (procfsRootDir.exists()) {
- Assert.assertTrue(FileUtil.fullyDelete(procfsRootDir));
+ assertTrue(FileUtil.fullyDelete(procfsRootDir));
// create afresh
- Assert.assertTrue(procfsRootDir.mkdirs());
+ assertTrue(procfsRootDir.mkdirs());
@@ -24,16 +24,19 @@ import java.util.ArrayList;
import org.apache.hadoop.net.DNSToSwitchMapping;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
public class TestRackResolver {
@@ -41,7 +44,7 @@ public class TestRackResolver {
LoggerFactory.getLogger(TestRackResolver.class);
private static final String invalidHost = "invalidHost";
RackResolver.reset();
@@ -54,8 +57,7 @@ public class TestRackResolver {
public List<String> resolve(List<String> hostList) {
// Only one host at a time
- Assert.assertTrue("hostList size is " + hostList.size(),
- hostList.size() <= 1);
+ assertTrue(hostList.size() <= 1, "hostList size is " + hostList.size());
List<String> returnList = new ArrayList<String>();
if (hostList.isEmpty()) {
return returnList;
@@ -74,7 +76,7 @@ public class TestRackResolver {
// I should not be reached again as RackResolver is supposed to do
// caching.
- Assert.assertTrue(numHost1 <= 1);
+ assertTrue(numHost1 <= 1);
@@ -112,7 +114,7 @@ public class TestRackResolver {
- Assert.assertEquals(returnList.size(), hostList.size());
+ assertEquals(returnList.size(), hostList.size());
@@ -127,11 +129,11 @@ public class TestRackResolver {
- public void testCaching() {
+ void testCaching() {
conf.setClass(
- CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
- MyResolver.class, DNSToSwitchMapping.class);
+ CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
+ MyResolver.class, DNSToSwitchMapping.class);
RackResolver.init(conf);
InetAddress iaddr = InetAddress.getByName("host1");
@@ -140,15 +142,15 @@ public class TestRackResolver {
// Ignore if not found
Node node = RackResolver.resolve("host1");
- Assert.assertEquals("/rack1", node.getNetworkLocation());
+ assertEquals("/rack1", node.getNetworkLocation());
node = RackResolver.resolve("host1");
node = RackResolver.resolve(invalidHost);
- Assert.assertEquals(NetworkTopology.DEFAULT_RACK, node.getNetworkLocation());
+ assertEquals(NetworkTopology.DEFAULT_RACK, node.getNetworkLocation());
- public void testMultipleHosts() {
+ void testMultipleHosts() {
CommonConfigurationKeysPublic
@@ -158,9 +160,9 @@ public class TestRackResolver {
List<Node> nodes = RackResolver.resolve(
Arrays.asList("host1", invalidHost, "host2"));
- Assert.assertEquals("/rack1", nodes.get(0).getNetworkLocation());
- Assert.assertEquals(NetworkTopology.DEFAULT_RACK,
+ assertEquals("/rack1", nodes.get(0).getNetworkLocation());
+ assertEquals(NetworkTopology.DEFAULT_RACK,
nodes.get(1).getNetworkLocation());
- Assert.assertEquals("/rack2", nodes.get(2).getNetworkLocation());
+ assertEquals("/rack2", nodes.get(2).getNetworkLocation());
@@ -18,17 +18,19 @@
import org.apache.hadoop.net.ScriptBasedMapping;
public class TestRackResolverScriptBasedMapping {
- public void testScriptName() {
+ void testScriptName() {
conf
.setClass(
@@ -38,7 +40,7 @@ public class TestRackResolverScriptBasedMapping {
conf.set(CommonConfigurationKeysPublic.NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY,
"testScript");
- Assert.assertEquals(RackResolver.getDnsToSwitchMapping().toString(),
+ assertEquals(RackResolver.getDnsToSwitchMapping().toString(),
"script-based mapping with script testScript");
@@ -17,11 +17,14 @@
-import static org.hamcrest.core.IsInstanceOf.*;
-import static org.hamcrest.core.IsSame.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.IsInstanceOf.instanceOf;
+import static org.hamcrest.core.IsSame.sameInstance;
* A JUnit test to test {@link ResourceCalculatorPlugin}
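`org.junit.Assert.assertThat` was removed in JUnit 5, so tests keeping Hamcrest matchers must import `assertThat` from `org.hamcrest.MatcherAssert` directly; the hunk above also narrows the wildcard matcher imports to the names actually used. A minimal sketch of the resulting style (class and values are illustrative):

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsSame.sameInstance;

import org.junit.jupiter.api.Test;

class HamcrestExample {
  @Test
  void matchersWorkWithoutJUnitAssertThat() {
    Object value = "text";
    assertThat(value, instanceOf(String.class));
    assertThat(value, sameInstance(value));
  }
}
```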
@@ -73,7 +76,7 @@ public class TestResourceCalculatorProcessTree {
- public void testCreateInstance() {
+ void testCreateInstance() {
ResourceCalculatorProcessTree tree;
tree = ResourceCalculatorProcessTree.getResourceCalculatorProcessTree("1", EmptyProcessTree.class, new Configuration());
assertNotNull(tree);
@@ -81,7 +84,7 @@ public class TestResourceCalculatorProcessTree {
- public void testCreatedInstanceConfigured() {
+ void testCreatedInstanceConfigured() {
tree = ResourceCalculatorProcessTree.getResourceCalculatorProcessTree("1", EmptyProcessTree.class, conf);
@@ -17,34 +17,35 @@
public class TestTimelineServiceHelper {
- public void testMapCastToHashMap() {
+ void testMapCastToHashMap() {
// Test null map be casted to null
Map<String, String> nullMap = null;
- Assert.assertNull(TimelineServiceHelper.mapCastToHashMap(nullMap));
+ assertNull(TimelineServiceHelper.mapCastToHashMap(nullMap));
// Test empty hashmap be casted to a empty hashmap
Map<String, String> emptyHashMap = new HashMap<String, String>();
TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0);
// Test empty non-hashmap be casted to a empty hashmap
Map<String, String> emptyTreeMap = new TreeMap<String, String>();
TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0);
// Test non-empty hashmap be casted to hashmap correctly
@@ -52,7 +53,7 @@ public class TestTimelineServiceHelper {
String key = "KEY";
String value = "VALUE";
firstHashMap.put(key, value);
TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap);
// Test non-empty non-hashmap is casted correctly.
@@ -60,7 +61,7 @@ public class TestTimelineServiceHelper {
firstTreeMap.put(key, value);
HashMap<String, String> alternateHashMap =
TimelineServiceHelper.mapCastToHashMap(firstTreeMap);
- Assert.assertEquals(firstTreeMap.size(), alternateHashMap.size());
+ assertEquals(firstTreeMap.size(), alternateHashMap.size());
assertThat(alternateHashMap.get(key)).isEqualTo(value);
// Test complicated hashmap be casted correctly
@@ -69,7 +70,7 @@ public class TestTimelineServiceHelper {
Set<String> hashSet = new HashSet<String>();
hashSet.add(value);
complicatedHashMap.put(key, hashSet);
TimelineServiceHelper.mapCastToHashMap(complicatedHashMap),
complicatedHashMap);
@@ -77,7 +78,7 @@ public class TestTimelineServiceHelper {
Map<String, Set<String>> complicatedTreeMap =
new TreeMap<String, Set<String>>();
complicatedTreeMap.put(key, hashSet);
TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key),
hashSet);
@@ -18,64 +18,64 @@
import java.text.SimpleDateFormat;
import java.util.Date;
import static org.apache.hadoop.yarn.util.Times.ISO8601_DATE_FORMAT;
public class TestTimes {
- public void testNegativeStartTimes() {
+ void testNegativeStartTimes() {
long elapsed = Times.elapsed(-5, 10, true);
- Assert.assertEquals("Elapsed time is not 0", 0, elapsed);
+ assertEquals(0, elapsed, "Elapsed time is not 0");
elapsed = Times.elapsed(-5, 10, false);
- Assert.assertEquals("Elapsed time is not -1", -1, elapsed);
+ assertEquals(-1, elapsed, "Elapsed time is not -1");
- public void testNegativeFinishTimes() {
+ void testNegativeFinishTimes() {
long elapsed = Times.elapsed(5, -10, false);
- public void testNegativeStartandFinishTimes() {
+ void testNegativeStartandFinishTimes() {
long elapsed = Times.elapsed(-5, -10, false);
- public void testPositiveStartandFinishTimes() {
+ void testPositiveStartandFinishTimes() {
long elapsed = Times.elapsed(5, 10, true);
- Assert.assertEquals("Elapsed time is not 5", 5, elapsed);
+ assertEquals(5, elapsed, "Elapsed time is not 5");
elapsed = Times.elapsed(5, 10, false);
- public void testFinishTimesAheadOfStartTimes() {
+ void testFinishTimesAheadOfStartTimes() {
long elapsed = Times.elapsed(10, 5, true);
elapsed = Times.elapsed(10, 5, false);
// use Long.MAX_VALUE to ensure started time is after the current one
elapsed = Times.elapsed(Long.MAX_VALUE, 0, true);
- public void validateISO() throws IOException {
+ void validateISO() throws IOException {
SimpleDateFormat isoFormat = new SimpleDateFormat(ISO8601_DATE_FORMAT);
for (int i = 0; i < 1000; i++) {
String instant = Times.formatISO8601(now);
String date = isoFormat.format(new Date(now));
- Assert.assertEquals(date, instant);
+ assertEquals(date, instant);
@@ -18,13 +18,14 @@
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
public class TestWindowsBasedProcessTree {
private static final Logger LOG = LoggerFactory
@@ -42,12 +43,13 @@ public class TestWindowsBasedProcessTree {
- @Test (timeout = 30000)
- public void tree() {
+ @Test
+ @Timeout(30000)
+ void tree() {
assumeWindows();
- assertTrue("WindowsBasedProcessTree should be available on Windows",
- WindowsBasedProcessTree.isAvailable());
+ assertTrue(WindowsBasedProcessTree.isAvailable(),
+ "WindowsBasedProcessTree should be available on Windows");
long elapsedTimeBetweenUpdatesMsec = 0;
testClock.setTime(elapsedTimeBetweenUpdatesMsec);
@@ -72,8 +74,7 @@ public class TestWindowsBasedProcessTree {
assertTrue(pTree.getRssMemorySize(1) == 2048);
assertTrue(pTree.getCumulativeCpuTime() == 3000);
assertTrue(pTree.getCpuUsagePercent() == 200);
- Assert.assertEquals("Percent CPU time is not correct",
- pTree.getCpuUsagePercent(), 200, 0.01);
+ assertEquals(pTree.getCpuUsagePercent(), 200, 0.01, "Percent CPU time is not correct");
pTree.infoStr = "3524,1024,1024,1500\r\n2844,1024,1024,1500\r\n";
elapsedTimeBetweenUpdatesMsec = 2000;
@@ -84,7 +85,6 @@ public class TestWindowsBasedProcessTree {
assertTrue(pTree.getRssMemorySize() == 2048);
assertTrue(pTree.getRssMemorySize(2) == 2048);
assertTrue(pTree.getCumulativeCpuTime() == 4000);
- pTree.getCpuUsagePercent(), 0, 0.01);
+ assertEquals(pTree.getCpuUsagePercent(), 0, 0.01, "Percent CPU time is not correct");
@@ -20,39 +20,38 @@ package org.apache.hadoop.yarn.util;
* A JUnit test to test {@link YarnVersionInfo}
public class TestYarnVersionInfo {
* Test the yarn version info routines.
- public void versionInfoGenerated() throws IOException {
+ void versionInfoGenerated() throws IOException {
// can't easily know what the correct values are going to be so just
// make sure they aren't Unknown
- assertNotEquals("getVersion returned Unknown",
- "Unknown", YarnVersionInfo.getVersion());
- assertNotEquals("getUser returned Unknown",
- "Unknown", YarnVersionInfo.getUser());
- assertNotEquals("getSrcChecksum returned Unknown",
- "Unknown", YarnVersionInfo.getSrcChecksum());
+ assertNotEquals("Unknown", YarnVersionInfo.getVersion(), "getVersion returned Unknown");
+ assertNotEquals("Unknown", YarnVersionInfo.getUser(), "getUser returned Unknown");
+ assertNotEquals("Unknown", YarnVersionInfo.getSrcChecksum(), "getSrcChecksum returned Unknown");
// these could be Unknown if the VersionInfo generated from code not in svn or git
// so just check that they return something
- assertNotNull("getUrl returned null", YarnVersionInfo.getUrl());
- assertNotNull("getRevision returned null", YarnVersionInfo.getRevision());
- assertNotNull("getBranch returned null", YarnVersionInfo.getBranch());
+ assertNotNull(YarnVersionInfo.getUrl(), "getUrl returned null");
+ assertNotNull(YarnVersionInfo.getRevision(), "getRevision returned null");
+ assertNotNull(YarnVersionInfo.getBranch(), "getBranch returned null");
- assertTrue("getBuildVersion check doesn't contain: source checksum",
- YarnVersionInfo.getBuildVersion().contains("source checksum"));
+ assertTrue(YarnVersionInfo.getBuildVersion().contains("source checksum"),
+ "getBuildVersion check doesn't contain: source checksum");
@@ -16,14 +16,6 @@
package org.apache.hadoop.yarn.util.resource;
-import org.apache.hadoop.util.Lists;
-import org.apache.hadoop.yarn.LocalConfigurationProvider;
-import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes;
-import org.apache.hadoop.yarn.api.records.ResourceInformation;
-import org.apache.hadoop.yarn.exceptions.YarnException;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
@@ -33,6 +25,14 @@ import java.util.Map;
import java.util.stream.IntStream;
+import org.apache.hadoop.util.Lists;
+import org.apache.hadoop.yarn.LocalConfigurationProvider;
+import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes;
+import org.apache.hadoop.yarn.api.records.ResourceInformation;
+import org.apache.hadoop.yarn.exceptions.YarnException;
import static java.util.stream.Collectors.toList;
@@ -22,32 +22,32 @@ import java.util.Arrays;
public class TestResourceCalculator {
private static final String EXTRA_RESOURCE_NAME = "test";
- private final ResourceCalculator resourceCalculator;
+ private ResourceCalculator resourceCalculator;
- @Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> getParameters() {
- return Arrays.asList(new Object[][] {
- { "DefaultResourceCalculator", new DefaultResourceCalculator() },
- { "DominantResourceCalculator", new DominantResourceCalculator() } });
+ return Arrays.asList(new Object[][]{
+ {"DefaultResourceCalculator", new DefaultResourceCalculator()},
+ {"DominantResourceCalculator", new DominantResourceCalculator()}});
public void setupNoExtraResource() {
// This has to run before each test because we don't know when
// setupExtraResource() might be called
@@ -61,34 +61,38 @@ public class TestResourceCalculator {
ResourceUtils.resetResourceTypes(conf);
- public TestResourceCalculator(String name, ResourceCalculator rs) {
+ public void initTestResourceCalculator(String name, ResourceCalculator rs) {
this.resourceCalculator = rs;
- public void testFitsIn() {
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("getParameters")
+ void testFitsIn(String name, ResourceCalculator rs) {
+ initTestResourceCalculator(name, rs);
if (resourceCalculator instanceof DefaultResourceCalculator) {
- Assert.assertTrue(resourceCalculator.fitsIn(
+ assertTrue(resourceCalculator.fitsIn(
Resource.newInstance(1, 2), Resource.newInstance(2, 1)));
Resource.newInstance(1, 2), Resource.newInstance(2, 2)));
Resource.newInstance(1, 2), Resource.newInstance(1, 2)));
Resource.newInstance(1, 2), Resource.newInstance(1, 1)));
- Assert.assertFalse(resourceCalculator.fitsIn(
+ assertFalse(resourceCalculator.fitsIn(
Resource.newInstance(2, 1), Resource.newInstance(1, 2)));
} else if (resourceCalculator instanceof DominantResourceCalculator) {
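The hunks above replace JUnit 4's `Parameterized` runner, which injected parameters through the constructor, with `@ParameterizedTest` plus `@MethodSource`: each test method now receives the parameters directly and forwards them to an init helper that fills the old constructor-set field. A minimal sketch of that shape, using an illustrative class (not the patch's code):

```java
import static org.junit.jupiter.api.Assertions.assertFalse;

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

class ParameterizedExample {
  private String label;

  public static Collection<String> getParameters() {
    return Arrays.asList("first", "second");
  }

  // Plays the role of the removed JUnit 4 constructor.
  void initLabel(String label) {
    this.label = label;
  }

  @ParameterizedTest(name = "{0}")
  @MethodSource("getParameters")
  void labelIsNotEmpty(String label) {
    initLabel(label);
    assertFalse(this.label.isEmpty());
  }
}
```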
@@ -121,22 +125,22 @@ public class TestResourceCalculator {
int expected) {
int actual = resourceCalculator.compare(cluster, res1, res2);
- assertEquals(String.format("Resource comparison did not give the expected "
- + "result for %s v/s %s", res1.toString(), res2.toString()),
- expected, actual);
+ assertEquals(expected, actual, String.format("Resource comparison did not give the expected "
+ + "result for %s v/s %s", res1.toString(), res2.toString()));
if (expected != 0) {
// Try again with args in the opposite order and the negative of the
// expected result.
actual = resourceCalculator.compare(cluster, res2, res1);
- assertEquals(String.format("Resource comparison did not give the "
- + "expected result for %s v/s %s", res2.toString(), res1.toString()),
- expected * -1, actual);
+ assertEquals(expected * -1, actual, String.format("Resource comparison did not give the "
+ + "expected result for %s v/s %s", res2.toString(), res1.toString()));
- public void testCompareWithOnlyMandatory() {
+ void testCompareWithOnlyMandatory(String name, ResourceCalculator rs) {
// This test is necessary because there are optimizations that are only
// triggered when only the mandatory resources are configured.
@@ -173,8 +177,10 @@ public class TestResourceCalculator {
assertComparison(cluster, newResource(3, 1), newResource(3, 0), 1);
- public void testCompare() {
+ void testCompare(String name, ResourceCalculator rs) {
// Test with 3 resources
setupExtraResource();
@@ -209,7 +215,7 @@ public class TestResourceCalculator {
* Verify compare when one or all the resource are zero.
- private void testCompareDominantZeroValueResource(){
+ private void testCompareDominantZeroValueResource() {
Resource cluster = newResource(4L, 4, 0);
assertComparison(cluster, newResource(2, 1, 1), newResource(1, 1, 2), 1);
assertComparison(cluster, newResource(2, 2, 1), newResource(1, 2, 2), 1);
@@ -261,8 +267,11 @@ public class TestResourceCalculator {
assertComparison(cluster, newResource(3, 1, 1), newResource(3, 0, 0), 1);
- public void testCompareWithEmptyCluster() {
+ void testCompareWithEmptyCluster(String name, ResourceCalculator rs) {
Resource clusterResource = Resource.newInstance(0, 0);
// For lhs == rhs
@@ -316,35 +325,39 @@ public class TestResourceCalculator {
boolean greaterThan, boolean greaterThanOrEqual, Resource max,
Resource min) {
- assertEquals("Less Than operation is wrongly calculated.", lessThan,
- Resources.lessThan(resourceCalculator, clusterResource, lhs, rhs));
+ assertEquals(lessThan,
+ Resources.lessThan(resourceCalculator, clusterResource, lhs, rhs),
+ "Less Than operation is wrongly calculated.");
- "Less Than Or Equal To operation is wrongly calculated.",
lessThanOrEqual, Resources.lessThanOrEqual(resourceCalculator,
- clusterResource, lhs, rhs));
+ clusterResource, lhs, rhs), "Less Than Or Equal To operation is wrongly calculated.");
- assertEquals("Greater Than operation is wrongly calculated.",
- greaterThan,
- Resources.greaterThan(resourceCalculator, clusterResource, lhs, rhs));
+ assertEquals(greaterThan,
+ Resources.greaterThan(resourceCalculator, clusterResource, lhs, rhs),
+ "Greater Than operation is wrongly calculated.");
- assertEquals(
- "Greater Than Or Equal To operation is wrongly calculated.",
- greaterThanOrEqual, Resources.greaterThanOrEqual(resourceCalculator,
+ assertEquals(greaterThanOrEqual,
+ Resources.greaterThanOrEqual(resourceCalculator, clusterResource, lhs, rhs),
+ "Greater Than Or Equal To operation is wrongly calculated.");
- assertEquals("Max(value) Operation wrongly calculated.", max,
- Resources.max(resourceCalculator, clusterResource, lhs, rhs));
+ assertEquals(max,
+ Resources.max(resourceCalculator, clusterResource, lhs, rhs),
+ "Max(value) Operation wrongly calculated.");
- assertEquals("Min(value) operation is wrongly calculated.", min,
- Resources.min(resourceCalculator, clusterResource, lhs, rhs));
+ assertEquals(min,
+ Resources.min(resourceCalculator, clusterResource, lhs, rhs),
+ "Min(value) operation is wrongly calculated.");
* Test resource normalization.
- public void testNormalize() {
+ void testNormalize(String name, ResourceCalculator rs) {
// requested resources value cannot be an arbitrary number.
Resource ask = Resource.newInstance(1111, 2);
Resource min = Resource.newInstance(1024, 1);
@@ -420,22 +433,28 @@ public class TestResourceCalculator {
- public void testDivisionByZeroRatioDenominatorIsZero() {
+ void testDivisionByZeroRatioDenominatorIsZero(String name, ResourceCalculator rs) {
float ratio = resourceCalculator.ratio(newResource(1, 1), newResource(0,
0));
assertEquals(Float.POSITIVE_INFINITY, ratio, 0.00001);
- public void testDivisionByZeroRatioNumeratorAndDenominatorIsZero() {
+ void testDivisionByZeroRatioNumeratorAndDenominatorIsZero(String name, ResourceCalculator rs) {
float ratio = resourceCalculator.ratio(newResource(0, 0), newResource(0,
assertEquals(0.0, ratio, 0.00001);
- public void testFitsInDiagnosticsCollector() {
+ void testFitsInDiagnosticsCollector(String name, ResourceCalculator rs) {
// required-resource = (0, 0)
assertEquals(ImmutableSet.of(),
@@ -551,8 +570,10 @@ public class TestResourceCalculator {
- public void testRatioWithNoExtraResource() {
+ void testRatioWithNoExtraResource(String name, ResourceCalculator rs) {
//setup
Resource resource1 = newResource(1, 1);
Resource resource2 = newResource(2, 1);
@@ -570,8 +591,10 @@ public class TestResourceCalculator {
- public void testRatioWithExtraResource() {
+ void testRatioWithExtraResource(String name, ResourceCalculator rs) {
Resource resource1 = newResource(1, 1, 2);
@@ -18,25 +18,6 @@
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.ResourceTypeInfo;
import java.net.URL;
@@ -48,6 +29,27 @@ import java.util.List;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceTypeInfo;
* Test class to verify all resource utility methods.
@@ -70,15 +72,12 @@ public class TestResourceUtils {
- public ExpectedException expexted = ExpectedException.none();
if (nodeResourcesFile != null && nodeResourcesFile.exists()) {
nodeResourcesFile.delete();
@@ -137,29 +136,28 @@ public class TestResourceUtils {
private void testMemoryAndVcores(Map<String, ResourceInformation> res) {
String memory = ResourceInformation.MEMORY_MB.getName();
String vcores = ResourceInformation.VCORES.getName();
- Assert.assertTrue("Resource 'memory' missing", res.containsKey(memory));
- Assert.assertEquals("'memory' units incorrect",
- ResourceInformation.MEMORY_MB.getUnits(), res.get(memory).getUnits());
- Assert.assertEquals("'memory' types incorrect",
- ResourceInformation.MEMORY_MB.getResourceType(),
- res.get(memory).getResourceType());
- Assert.assertTrue("Resource 'vcores' missing", res.containsKey(vcores));
- Assert.assertEquals("'vcores' units incorrect",
- ResourceInformation.VCORES.getUnits(), res.get(vcores).getUnits());
- Assert.assertEquals("'vcores' type incorrect",
- ResourceInformation.VCORES.getResourceType(),
- res.get(vcores).getResourceType());
+ assertTrue(res.containsKey(memory), "Resource 'memory' missing");
+ assertEquals(ResourceInformation.MEMORY_MB.getUnits(), res.get(memory).getUnits(),
+ "'memory' units incorrect");
+ assertEquals(ResourceInformation.MEMORY_MB.getResourceType(), res.get(memory).getResourceType(),
+ "'memory' types incorrect");
+ assertTrue(res.containsKey(vcores), "Resource 'vcores' missing");
+ assertEquals(ResourceInformation.VCORES.getUnits(), res.get(vcores).getUnits(),
+ "'vcores' units incorrect");
+ assertEquals(ResourceInformation.VCORES.getResourceType(),
+ res.get(vcores).getResourceType(),
+ "'vcores' type incorrect");
- public void testGetResourceTypes() {
+ void testGetResourceTypes() {
Map<String, ResourceInformation> res = ResourceUtils.getResourceTypes();
- Assert.assertEquals(2, res.size());
+ assertEquals(2, res.size());
testMemoryAndVcores(res);
- public void testGetResourceTypesConfigs() throws Exception {
+ void testGetResourceTypesConfigs() throws Exception {
ResourceFileInformation testFile1 =
@@ -183,19 +181,19 @@ public class TestResourceUtils {
res = setupResourceTypesInternal(conf, testInformation.filename);
- Assert.assertEquals(testInformation.resourceCount, res.size());
+ assertEquals(testInformation.resourceCount, res.size());
for (Map.Entry<String, String> entry :
testInformation.resourceNameUnitsMap.entrySet()) {
String resourceName = entry.getKey();
- Assert.assertTrue("Missing key " + resourceName,
- res.containsKey(resourceName));
- Assert.assertEquals(entry.getValue(), res.get(resourceName).getUnits());
+ assertTrue(res.containsKey(resourceName),
+ "Missing key " + resourceName);
+ assertEquals(entry.getValue(), res.get(resourceName).getUnits());
- public void testGetRequestedResourcesFromConfig() {
+ void testGetRequestedResourcesFromConfig() {
//these resource type configurations should be recognised
@@ -229,14 +227,14 @@ public class TestResourceUtils {
Set<String> expectedSet =
new HashSet<>(Arrays.asList(expectedKeys));
- Assert.assertEquals(properList.size(), expectedKeys.length);
+ assertEquals(properList.size(), expectedKeys.length);
properList.forEach(
- item -> Assert.assertTrue(expectedSet.contains(item.getName())));
+ item -> assertTrue(expectedSet.contains(item.getName())));
- public void testGetResourceTypesConfigErrors() throws IOException {
+ void testGetResourceTypesConfigErrors() throws IOException {
String[] resourceFiles = {"resource-types-error-1.xml",
@@ -246,7 +244,7 @@ public class TestResourceUtils {
setupResourceTypesInternal(conf, resourceFile);
- Assert.fail("Expected error with file " + resourceFile);
+ fail("Expected error with file " + resourceFile);
} catch (YarnRuntimeException | IllegalArgumentException e) {
//Test passed
@@ -254,7 +252,7 @@ public class TestResourceUtils {
- public void testInitializeResourcesMap() {
+ void testInitializeResourcesMap() {
String[] empty = {"", ""};
String[] res1 = {"resource1", "m"};
String[] res2 = {"resource2", "G"};
@@ -291,31 +289,30 @@ public class TestResourceUtils {
len = 4;
- Assert.assertEquals(len, ret.size());
+ assertEquals(len, ret.size());
for (String[] resources : test) {
if (resources[0].length() == 0) {
continue;
- Assert.assertTrue(ret.containsKey(resources[0]));
+ assertTrue(ret.containsKey(resources[0]));
ResourceInformation resInfo = ret.get(resources[0]);
- Assert.assertEquals(resources[1], resInfo.getUnits());
- Assert.assertEquals(ResourceTypes.COUNTABLE, resInfo.getResourceType());
+ assertEquals(resources[1], resInfo.getUnits());
+ assertEquals(ResourceTypes.COUNTABLE, resInfo.getResourceType());
// we must always have memory and vcores with their fixed units
- Assert.assertTrue(ret.containsKey("memory-mb"));
+ assertTrue(ret.containsKey("memory-mb"));
ResourceInformation memInfo = ret.get("memory-mb");
- Assert.assertEquals("Mi", memInfo.getUnits());
- Assert.assertEquals(ResourceTypes.COUNTABLE, memInfo.getResourceType());
- Assert.assertTrue(ret.containsKey("vcores"));
+ assertEquals("Mi", memInfo.getUnits());
+ assertEquals(ResourceTypes.COUNTABLE, memInfo.getResourceType());
+ assertTrue(ret.containsKey("vcores"));
ResourceInformation vcoresInfo = ret.get("vcores");
- Assert.assertEquals("", vcoresInfo.getUnits());
- .assertEquals(ResourceTypes.COUNTABLE, vcoresInfo.getResourceType());
+ assertEquals("", vcoresInfo.getUnits());
+ assertEquals(ResourceTypes.COUNTABLE, vcoresInfo.getResourceType());
- public void testInitializeResourcesMapErrors() {
+ void testInitializeResourcesMapErrors() {
String[] mem1 = {"memory-mb", ""};
String[] vcores1 = {"vcores", "M"};
@@ -346,7 +343,7 @@ public class TestResourceUtils {
ResourceUtils.initializeResourcesMap(conf);
- Assert.fail("resource map initialization should fail");
+ fail("resource map initialization should fail");
@@ -354,7 +351,7 @@ public class TestResourceUtils {
- public void testGetResourceInformation() throws Exception {
+ void testGetResourceInformation() throws Exception {
Map<String, Resource> testRun = new HashMap<>();
setupResourceTypesInternal(conf, "resource-types-4.xml");
@@ -372,16 +369,16 @@ public class TestResourceUtils {
ResourceUtils.resetNodeResources();
Map<String, ResourceInformation> actual = setupNodeResources(conf,
resourceFile);
- Assert.assertEquals(actual.size(),
+ assertEquals(actual.size(),
entry.getValue().getResources().length);
for (ResourceInformation resInfo : entry.getValue().getResources()) {
- Assert.assertEquals(resInfo, actual.get(resInfo.getName()));
+ assertEquals(resInfo, actual.get(resInfo.getName()));
- public void testGetNodeResourcesConfigErrors() throws Exception {
+ void testGetNodeResourcesConfigErrors() throws Exception {
String[] invalidNodeResFiles = {"node-resources-error-1.xml"};
@@ -390,7 +387,7 @@ public class TestResourceUtils {
setupNodeResources(conf, resourceFile);
@@ -398,26 +395,28 @@ public class TestResourceUtils {
- public void testGetNodeResourcesRedefineFpgaErrors() throws Exception {
- Configuration conf = new YarnConfiguration();
- expexted.expect(YarnRuntimeException.class);
- expexted.expectMessage("Defined mandatory resource type=yarn.io/fpga");
- setupResourceTypesInternal(conf,
- "resource-types-error-redefine-fpga-unit.xml");
+ void testGetNodeResourcesRedefineFpgaErrors() throws Exception {
+ Throwable exception = assertThrows(YarnRuntimeException.class, () -> {
+ Configuration conf = new YarnConfiguration();
+ setupResourceTypesInternal(conf,
+ "resource-types-error-redefine-fpga-unit.xml");
+ assertTrue(exception.getMessage().contains("Defined mandatory resource type=yarn.io/fpga"));
- public void testGetNodeResourcesRedefineGpuErrors() throws Exception {
- expexted.expectMessage("Defined mandatory resource type=yarn.io/gpu");
- "resource-types-error-redefine-gpu-unit.xml");
+ void testGetNodeResourcesRedefineGpuErrors() throws Exception {
+ "resource-types-error-redefine-gpu-unit.xml");
+ assertTrue(exception.getMessage().contains("Defined mandatory resource type=yarn.io/gpu"));
- public void testResourceNameFormatValidation() {
- String[] validNames = new String[] {
+ void testResourceNameFormatValidation() {
+ String[] validNames = new String[]{
"yarn.io/gpu",
"gpu",
"g_1_2",
@@ -427,7 +426,7 @@ public class TestResourceUtils {
"a....b",
- String[] invalidNames = new String[] {
+ String[] invalidNames = new String[]{
"asd/resource/-name",
"prefix/-resource_1",
"prefix/0123resource",
@@ -443,7 +442,7 @@ public class TestResourceUtils {
for (String invalidName : invalidNames) {
ResourceUtils.validateNameOfResourceNameAndThrowException(invalidName);
- Assert.fail("Expected to fail name check, the name=" + invalidName
+ fail("Expected to fail name check, the name=" + invalidName
+ " is illegal.");
// Expected
@@ -452,7 +451,7 @@ public class TestResourceUtils {
- public void testGetResourceInformationWithDiffUnits() throws Exception {
+ void testGetResourceInformationWithDiffUnits() throws Exception {
@@ -476,82 +475,82 @@ public class TestResourceUtils {
- public void testResourceUnitParsing() throws Exception {
+ void testResourceUnitParsing() throws Exception {
Resource res = ResourceUtils.createResourceFromString("memory=20g,vcores=3",
- ResourceUtils.getResourcesTypeInfo());
- Assert.assertEquals(Resources.createResource(20 * 1024, 3), res);
+ ResourceUtils.getResourcesTypeInfo());
+ assertEquals(Resources.createResource(20 * 1024, 3), res);
res = ResourceUtils.createResourceFromString("memory=20G,vcores=3",
res = ResourceUtils.createResourceFromString("memory=20M,vcores=3",
- Assert.assertEquals(Resources.createResource(20, 3), res);
+ assertEquals(Resources.createResource(20, 3), res);
res = ResourceUtils.createResourceFromString("memory=20m,vcores=3",
res = ResourceUtils.createResourceFromString("memory-mb=20,vcores=3",
res = ResourceUtils.createResourceFromString("memory-mb=20m,vcores=3",
res = ResourceUtils.createResourceFromString("memory-mb=20G,vcores=3",
// W/o unit for memory means bits, and 20 bits will be rounded to 0
res = ResourceUtils.createResourceFromString("memory=20,vcores=3",
- Assert.assertEquals(Resources.createResource(0, 3), res);
+ assertEquals(Resources.createResource(0, 3), res);
// Test multiple resources
List<ResourceTypeInfo> resTypes = new ArrayList<>(
resTypes.add(ResourceTypeInfo.newInstance(ResourceInformation.GPU_URI, ""));
ResourceUtils.reinitializeResources(resTypes);
res = ResourceUtils.createResourceFromString("memory=2G,vcores=3,gpu=0",
- resTypes);
- Assert.assertEquals(2 * 1024, res.getMemorySize());
- Assert.assertEquals(0, res.getResourceValue(ResourceInformation.GPU_URI));
+ resTypes);
+ assertEquals(2 * 1024, res.getMemorySize());
+ assertEquals(0, res.getResourceValue(ResourceInformation.GPU_URI));
res = ResourceUtils.createResourceFromString("memory=2G,vcores=3,gpu=3",
- Assert.assertEquals(3, res.getResourceValue(ResourceInformation.GPU_URI));
+ assertEquals(3, res.getResourceValue(ResourceInformation.GPU_URI));
res = ResourceUtils.createResourceFromString("memory=2G,vcores=3",
res = ResourceUtils.createResourceFromString(
- "memory=2G,vcores=3,yarn.io/gpu=0", resTypes);
+ "memory=2G,vcores=3,yarn.io/gpu=0", resTypes);
- "memory=2G,vcores=3,yarn.io/gpu=3", resTypes);
+ "memory=2G,vcores=3,yarn.io/gpu=3", resTypes);
- public void testMultipleOpsForResourcesWithTags() throws Exception {
+ void testMultipleOpsForResourcesWithTags() throws Exception {
setupResourceTypes(conf, "resource-types-6.xml");
@@ -18,28 +18,30 @@
-import static org.apache.hadoop.yarn.util.resource.Resources.componentwiseMin;
-import static org.apache.hadoop.yarn.util.resource.Resources.componentwiseMax;
import static org.apache.hadoop.yarn.util.resource.Resources.add;
-import static org.apache.hadoop.yarn.util.resource.Resources.multiplyAndRoundUp;
-import static org.apache.hadoop.yarn.util.resource.Resources.subtract;
+import static org.apache.hadoop.yarn.util.resource.Resources.componentwiseMax;
+import static org.apache.hadoop.yarn.util.resource.Resources.componentwiseMin;
+import static org.apache.hadoop.yarn.util.resource.Resources.fitsIn;
import static org.apache.hadoop.yarn.util.resource.Resources.multiply;
import static org.apache.hadoop.yarn.util.resource.Resources.multiplyAndAddTo;
import static org.apache.hadoop.yarn.util.resource.Resources.multiplyAndRoundDown;
-import static org.apache.hadoop.yarn.util.resource.Resources.fitsIn;
+import static org.apache.hadoop.yarn.util.resource.Resources.multiplyAndRoundUp;
+import static org.apache.hadoop.yarn.util.resource.Resources.subtract;
public class TestResources {
private static final String INVALID_RESOURCE_MSG = "Invalid resource value";
@@ -75,12 +77,12 @@ public class TestResources {
setupExtraResourceType();
deleteResourceTypesFile();
@@ -96,8 +98,9 @@ public class TestResources {
- public void testCompareToWithUnboundedResource() {
+ void testCompareToWithUnboundedResource() {
unsetExtraResourceType();
Resource unboundedClone = Resources.clone(ExtendedResources.unbounded());
assertTrue(unboundedClone
@@ -107,8 +110,9 @@ public class TestResources {
unboundedClone.compareTo(createResource(0, Integer.MAX_VALUE)) > 0);
- public void testCompareToWithNoneResource() {
+ void testCompareToWithNoneResource() {
assertTrue(Resources.none().compareTo(createResource(0, 0)) == 0);
assertTrue(Resources.none().compareTo(createResource(1, 0)) < 0);
assertTrue(Resources.none().compareTo(createResource(0, 1)) < 0);
@@ -118,8 +122,9 @@ public class TestResources {
assertTrue(Resources.none().compareTo(createResource(0, 0, 1)) < 0);
- @Test(timeout = 1000)
+ @Test
+ @Timeout(1000)
+ void testFitsIn() {
assertTrue(fitsIn(createResource(1, 1), createResource(2, 2)));
assertTrue(fitsIn(createResource(2, 2), createResource(2, 2)));
assertFalse(fitsIn(createResource(2, 2), createResource(1, 1)));
@@ -130,8 +135,9 @@ public class TestResources {
assertTrue(fitsIn(createResource(1, 1, 1), createResource(2, 2, 2)));
- public void testComponentwiseMin() {
+ void testComponentwiseMin() {
assertEquals(createResource(1, 1),
componentwiseMin(createResource(1, 1), createResource(2, 2)));
@@ -147,7 +153,7 @@ public class TestResources {
- public void testComponentwiseMax() {
+ void testComponentwiseMax() {
assertEquals(createResource(2, 2),
componentwiseMax(createResource(1, 1), createResource(2, 2)));
@@ -165,7 +171,7 @@ public class TestResources {
- public void testAdd() {
+ void testAdd() {
assertEquals(createResource(2, 3),
add(createResource(1, 1), createResource(1, 2)));
assertEquals(createResource(3, 2),
@@ -177,7 +183,7 @@ public class TestResources {
- public void testSubtract() {
+ void testSubtract() {
assertEquals(createResource(1, 0),
subtract(createResource(2, 1), createResource(1, 1)));
assertEquals(createResource(0, 1),
@@ -189,7 +195,7 @@ public class TestResources {
- public void testClone() {
+ void testClone() {
assertEquals(createResource(1, 1), Resources.clone(createResource(1, 1)));
assertEquals(createResource(1, 1, 0),
Resources.clone(createResource(1, 1)));
@@ -200,7 +206,7 @@ public class TestResources {
- public void testMultiply() {
+ void testMultiply() {
assertEquals(createResource(4, 2), multiply(createResource(2, 1), 2));
assertEquals(createResource(4, 2, 0), multiply(createResource(2, 1), 2));
assertEquals(createResource(2, 4), multiply(createResource(1, 2), 2));
@@ -209,61 +215,74 @@ public class TestResources {
assertEquals(createResource(4, 4, 6), multiply(createResource(2, 2, 3), 2));
- public void testMultiplyRoundUp() {
+ void testMultiplyRoundUp() {
final double by = 0.5;
final String memoryErrorMsg = "Invalid memory size.";
final String vcoreErrorMsg = "Invalid virtual core number.";
Resource resource = Resources.createResource(1, 1);
Resource result = Resources.multiplyAndRoundUp(resource, by);
- assertEquals(memoryErrorMsg, result.getMemorySize(), 1);
- assertEquals(vcoreErrorMsg, result.getVirtualCores(), 1);
+ assertEquals(result.getMemorySize(), 1, memoryErrorMsg);
+ assertEquals(result.getVirtualCores(), 1, vcoreErrorMsg);
resource = Resources.createResource(2, 2);
result = Resources.multiplyAndRoundUp(resource, by);
resource = Resources.createResource(0, 0);
- assertEquals(memoryErrorMsg, result.getMemorySize(), 0);
- assertEquals(vcoreErrorMsg, result.getVirtualCores(), 0);
+ assertEquals(result.getMemorySize(), 0, memoryErrorMsg);
+ assertEquals(result.getVirtualCores(), 0, vcoreErrorMsg);
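The hunk above shows the recurring mechanical change of this migration: JUnit 4 takes the failure message as the first assertEquals argument, JUnit 5 takes it as the last. A minimal sketch of the two signatures side by side (method name is hypothetical):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    class MessageOrderSketch {
      void check(long actualMemory) {
        // JUnit 4: assertEquals("Invalid memory size.", 1, actualMemory);
        // JUnit 5 moves the message to the trailing position:
        assertEquals(1, actualMemory, "Invalid memory size.");
      }
    }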
- public void testMultiplyAndRoundUpCustomResources() {
- assertEquals(INVALID_RESOURCE_MSG, createResource(5, 2, 8),
- multiplyAndRoundUp(createResource(3, 1, 5), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(5, 2, 0),
- multiplyAndRoundUp(createResource(3, 1, 0), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(5, 5, 0),
- multiplyAndRoundUp(createResource(3, 3, 0), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(8, 3, 13),
- multiplyAndRoundUp(createResource(3, 1, 5), 2.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(8, 3, 0),
- multiplyAndRoundUp(createResource(3, 1, 0), 2.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(8, 8, 0),
- multiplyAndRoundUp(createResource(3, 3, 0), 2.5));
+ void testMultiplyAndRoundUpCustomResources() {
+ assertEquals(createResource(5, 2, 8),
+ multiplyAndRoundUp(createResource(3, 1, 5), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(5, 2, 0),
+ multiplyAndRoundUp(createResource(3, 1, 0), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(5, 5, 0),
+ multiplyAndRoundUp(createResource(3, 3, 0), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(8, 3, 13),
+ multiplyAndRoundUp(createResource(3, 1, 5), 2.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(8, 3, 0),
+ multiplyAndRoundUp(createResource(3, 1, 0), 2.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(8, 8, 0),
+ multiplyAndRoundUp(createResource(3, 3, 0), 2.5),
+ INVALID_RESOURCE_MSG);
- public void testMultiplyAndRoundDown() {
- assertEquals(INVALID_RESOURCE_MSG, createResource(4, 1),
- multiplyAndRoundDown(createResource(3, 1), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(4, 1, 0),
- multiplyAndRoundDown(createResource(3, 1), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(1, 4),
- multiplyAndRoundDown(createResource(1, 3), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(1, 4, 0),
- multiplyAndRoundDown(createResource(1, 3), 1.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(7, 7, 0),
- multiplyAndRoundDown(createResource(3, 3, 0), 2.5));
- assertEquals(INVALID_RESOURCE_MSG, createResource(2, 2, 7),
- multiplyAndRoundDown(createResource(1, 1, 3), 2.5));
+ void testMultiplyAndRoundDown() {
+ assertEquals(createResource(4, 1),
+ multiplyAndRoundDown(createResource(3, 1), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(4, 1, 0),
+ multiplyAndRoundDown(createResource(3, 1), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(1, 4),
+ multiplyAndRoundDown(createResource(1, 3), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(1, 4, 0),
+ multiplyAndRoundDown(createResource(1, 3), 1.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(7, 7, 0),
+ multiplyAndRoundDown(createResource(3, 3, 0), 2.5),
+ INVALID_RESOURCE_MSG);
+ assertEquals(createResource(2, 2, 7),
+ multiplyAndRoundDown(createResource(1, 1, 3), 2.5),
+ INVALID_RESOURCE_MSG);
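The expected values in these two tests encode per-component rounding: 3 * 1.5 becomes 5 under multiplyAndRoundUp and 4 under multiplyAndRoundDown. A hypothetical standalone mirror of that arithmetic (the helper names are mine, not the Resources API):

    class RoundingSketch {
      static long roundUp(long value, double by) {
        return (long) Math.ceil(value * by);
      }

      static long roundDown(long value, double by) {
        return (long) Math.floor(value * by);
      }

      public static void main(String[] args) {
        System.out.println(roundUp(3, 1.5));   // 5, as in createResource(5, ...)
        System.out.println(roundDown(3, 1.5)); // 4, as in createResource(4, ...)
      }
    }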
- public void testMultiplyAndAddTo() throws Exception {
+ void testMultiplyAndAddTo() throws Exception {
assertEquals(createResource(6, 4),
@@ -283,7 +302,7 @@ public class TestResources {
- public void testCreateResourceWithSameLongValue() throws Exception {
+ void testCreateResourceWithSameLongValue() throws Exception {
@@ -294,7 +313,7 @@ public class TestResources {
- public void testCreateResourceWithSameIntValue() throws Exception {
+ void testCreateResourceWithSameIntValue() throws Exception {
@@ -305,14 +324,14 @@ public class TestResources {
- public void testCreateSimpleResourceWithSameLongValue() {
+ void testCreateSimpleResourceWithSameLongValue() {
Resource res = ResourceUtils.createResourceWithSameValue(11L);
assertEquals(11L, res.getMemorySize());
assertEquals(11, res.getVirtualCores());
- public void testCreateSimpleResourceWithSameIntValue() {
+ void testCreateSimpleResourceWithSameIntValue() {
Resource res = ResourceUtils.createResourceWithSameValue(11);
assertEquals(11, res.getMemorySize());
@@ -17,11 +17,13 @@
package org.apache.hadoop.yarn.util.timeline;
+import java.util.UUID;
-import java.util.UUID;
* Test case for limiting flow name size.
@@ -31,22 +33,22 @@ public class TestShortenedFlowName {
private static final String TEST_FLOW_NAME = "TestFlowName";
- public void testRemovingUUID() {
+ void testRemovingUUID() {
String flowName = TEST_FLOW_NAME + "-" + UUID.randomUUID();
flowName = TimelineUtils.removeUUID(flowName);
- Assert.assertEquals(TEST_FLOW_NAME, flowName);
+ assertEquals(TEST_FLOW_NAME, flowName);
- public void testShortenedFlowName() {
+ void testShortenedFlowName() {
String flowName = TEST_FLOW_NAME + UUID.randomUUID();
conf.setInt(YarnConfiguration.FLOW_NAME_MAX_SIZE, 8);
String shortenedFlowName = TimelineUtils.shortenFlowName(flowName, conf);
- Assert.assertEquals("TestFlow", shortenedFlowName);
+ assertEquals("TestFlow", shortenedFlowName);
conf.setInt(YarnConfiguration.FLOW_NAME_MAX_SIZE,
YarnConfiguration.FLOW_NAME_DEFAULT_MAX_SIZE);
shortenedFlowName = TimelineUtils.shortenFlowName(flowName, conf);
- Assert.assertEquals(TEST_FLOW_NAME, shortenedFlowName);
+ assertEquals(TEST_FLOW_NAME, shortenedFlowName);
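These tests pin down the flow-name helpers: removeUUID strips a trailing UUID suffix, and shortenFlowName truncates to FLOW_NAME_MAX_SIZE (a limit of 8 yields "TestFlow"). A hypothetical re-implementation of that contract, not the TimelineUtils code itself:

    import java.util.UUID;

    class FlowNameSketch {
      private static final String UUID_PATTERN =
          "-?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$";

      static String shorten(String flowName, int maxSize) {
        // Drop a trailing "<uuid>" (optionally "-<uuid>"), then truncate.
        String trimmed = flowName.replaceAll(UUID_PATTERN, "");
        return trimmed.length() > maxSize ? trimmed.substring(0, maxSize) : trimmed;
      }

      public static void main(String[] args) {
        String name = "TestFlowName" + UUID.randomUUID();
        System.out.println(shorten(name, 8));  // TestFlow
        System.out.println(shorten(name, 50)); // TestFlowName
      }
    }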
@@ -21,11 +21,11 @@ package org.apache.hadoop.yarn.webapp;
import java.util.Random;
-import org.apache.hadoop.net.ServerSocketUtil;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;
+import org.apache.hadoop.net.ServerSocketUtil;
public abstract class JerseyTestBase extends JerseyTest {
public JerseyTestBase(WebAppDescriptor appDescriptor) {
super(appDescriptor);
@@ -21,17 +21,16 @@ package org.apache.hadoop.yarn.webapp;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
-import org.apache.hadoop.yarn.webapp.MyTestWebService.MyInfo;
import com.google.inject.Singleton;
import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.api.json.JSONJAXBContext;
+import org.apache.hadoop.yarn.webapp.MyTestWebService.MyInfo;
@Singleton
@Provider
public class MyTestJAXBContextResolver implements ContextResolver<JAXBContext> {
@@ -27,6 +27,7 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.http.JettyUtils;
@@ -20,64 +20,76 @@ package org.apache.hadoop.yarn.webapp;
-import org.apache.hadoop.yarn.webapp.WebApp;
-import org.apache.hadoop.yarn.webapp.WebAppException;
public class TestParseRoute {
- @Test public void testNormalAction() {
+ void testNormalAction() {
assertEquals(Arrays.asList("/foo/action", "foo", "action", ":a1", ":a2"),
- WebApp.parseRoute("/foo/action/:a1/:a2"));
+ WebApp.parseRoute("/foo/action/:a1/:a2"));
- @Test public void testDefaultController() {
+ void testDefaultController() {
assertEquals(Arrays.asList("/", "default", "index"),
- WebApp.parseRoute("/"));
+ WebApp.parseRoute("/"));
- @Test public void testDefaultAction() {
+ void testDefaultAction() {
assertEquals(Arrays.asList("/foo", "foo", "index"),
- WebApp.parseRoute("/foo"));
+ WebApp.parseRoute("/foo"));
- WebApp.parseRoute("/foo/"));
+ WebApp.parseRoute("/foo/"));
- @Test public void testMissingAction() {
+ void testMissingAction() {
assertEquals(Arrays.asList("/foo", "foo", "index", ":a1"),
- WebApp.parseRoute("/foo/:a1"));
+ WebApp.parseRoute("/foo/:a1"));
- @Test public void testDefaultCapture() {
+ void testDefaultCapture() {
assertEquals(Arrays.asList("/", "default", "index", ":a"),
- WebApp.parseRoute("/:a"));
+ WebApp.parseRoute("/:a"));
- @Test public void testPartialCapture1() {
+ void testPartialCapture1() {
assertEquals(Arrays.asList("/foo/action/bar", "foo", "action", "bar", ":a"),
- WebApp.parseRoute("/foo/action/bar/:a"));
+ WebApp.parseRoute("/foo/action/bar/:a"));
- @Test public void testPartialCapture2() {
+ void testPartialCapture2() {
assertEquals(Arrays.asList("/foo/action", "foo", "action", ":a1", "bar",
- ":a2", ":a3"),
- WebApp.parseRoute("/foo/action/:a1/bar/:a2/:a3"));
+ ":a2", ":a3"),
+ WebApp.parseRoute("/foo/action/:a1/bar/:a2/:a3"));
- @Test public void testLeadingPaddings() {
+ void testLeadingPaddings() {
assertEquals(Arrays.asList("/foo/action", "foo", "action", ":a"),
- WebApp.parseRoute(" /foo/action/ :a"));
+ WebApp.parseRoute(" /foo/action/ :a"));
- @Test public void testTrailingPaddings() {
+ void testTrailingPaddings() {
- WebApp.parseRoute("/foo/action//:a / "));
+ WebApp.parseRoute("/foo/action//:a / "));
assertEquals(Arrays.asList("/foo/action", "foo", "action"),
- WebApp.parseRoute("/foo/action / "));
+ WebApp.parseRoute("/foo/action / "));
- @Test(expected=WebAppException.class) public void testMissingLeadingSlash() {
- WebApp.parseRoute("foo/bar");
+ @Test
+ void testMissingLeadingSlash() {
+ assertThrows(WebAppException.class, () -> {
+ WebApp.parseRoute("foo/bar");
+ });
+ }
@@ -18,15 +18,18 @@
package org.apache.hadoop.yarn.webapp;
-import org.apache.hadoop.yarn.webapp.test.WebAppTests;
-import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.yarn.webapp.view.HtmlPage;
import java.io.PrintWriter;
import javax.servlet.http.HttpServletResponse;
import com.google.inject.Injector;
+import org.apache.hadoop.yarn.webapp.test.WebAppTests;
+import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+import org.apache.hadoop.yarn.webapp.view.HtmlPage;
public class TestSubViews {
@@ -61,7 +64,8 @@ public class TestSubViews {
- @Test public void testSubView() throws Exception {
+ void testSubView() throws Exception {
Injector injector = WebAppTests.createMockInjector(this);
injector.getInstance(MainView.class).render();
@@ -18,22 +18,16 @@
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_TABLE;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
import java.net.HttpURLConnection;
import java.net.URLEncoder;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.net.ServerSocketUtil;
@@ -42,11 +36,18 @@ import org.apache.hadoop.yarn.webapp.view.HtmlPage;
import org.apache.hadoop.yarn.webapp.view.JQueryUI;
import org.apache.hadoop.yarn.webapp.view.RobotsTextPage;
import org.apache.hadoop.yarn.webapp.view.TextPage;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_TABLE;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
public class TestWebApp {
static final Logger LOG = LoggerFactory.getLogger(TestWebApp.class);
@@ -150,12 +151,14 @@ public class TestWebApp {
String echo(String s) { return s; }
- @Test public void testCreate() {
+ void testCreate() {
WebApp app = WebApps.$for(this).start();
app.stop();
- @Test public void testCreateWithPort() {
+ void testCreateWithPort() {
// see if the ephemeral port is updated
WebApp app = WebApps.$for(this).at(0).start();
int port = app.getListenerAddress().getPort();
@@ -167,72 +170,80 @@ public class TestWebApp {
- @Test(expected=org.apache.hadoop.yarn.webapp.WebAppException.class)
- public void testCreateWithBindAddressNonZeroPort() {
- WebApp app = WebApps.$for(this).at("0.0.0.0:50000").start();
- int port = app.getListenerAddress().getPort();
- assertEquals(50000, port);
- // start another WebApp with same NonZero port
- WebApp app2 = WebApps.$for(this).at("0.0.0.0:50000").start();
- // An exception occurs (findPort disabled)
- app.stop();
- app2.stop();
+ @Test
+ void testCreateWithBindAddressNonZeroPort() {
+ assertThrows(org.apache.hadoop.yarn.webapp.WebAppException.class, () -> {
+ WebApp app = WebApps.$for(this).at("0.0.0.0:50000").start();
+ int port = app.getListenerAddress().getPort();
+ assertEquals(50000, port);
+ // start another WebApp with same NonZero port
+ WebApp app2 = WebApps.$for(this).at("0.0.0.0:50000").start();
+ // An exception occurs (findPort disabled)
+ app.stop();
+ app2.stop();
+ });
+ }
- public void testCreateWithNonZeroPort() {
- WebApp app = WebApps.$for(this).at(50000).start();
- WebApp app2 = WebApps.$for(this).at(50000).start();
+ void testCreateWithNonZeroPort() {
+ WebApp app = WebApps.$for(this).at(50000).start();
+ WebApp app2 = WebApps.$for(this).at(50000).start();
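The tests above walk the WebApps builder through its port modes: at(0) binds an ephemeral port, while at(50000) with findPort left disabled fails fast when the port is already taken. A minimal usage sketch of that lifecycle (the servlet context name "test" is arbitrary):

    import org.apache.hadoop.yarn.webapp.WebApp;
    import org.apache.hadoop.yarn.webapp.WebApps;

    class WebAppStartSketch {
      void startAndStop() {
        WebApp app = WebApps.$for("test", this).at(0).start(); // 0 = ephemeral
        int port = app.getListenerAddress().getPort();
        System.out.println("bound to port " + port);
        app.stop();
      }
    }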
- @Test public void testServePaths() {
+ void testServePaths() {
WebApp app = WebApps.$for("test", this).start();
assertEquals("/test", app.getRedirectPath());
- String[] expectedPaths = { "/test", "/test/*" };
+ String[] expectedPaths = {"/test", "/test/*"};
String[] pathSpecs = app.getServePathSpecs();
assertEquals(2, pathSpecs.length);
- for(int i = 0; i < expectedPaths.length; i++) {
+ for (int i = 0; i < expectedPaths.length; i++) {
assertTrue(ArrayUtils.contains(pathSpecs, expectedPaths[i]));
- @Test public void testServePathsNoName() {
+ void testServePathsNoName() {
WebApp app = WebApps.$for("", this).start();
assertEquals("/", app.getRedirectPath());
- String[] expectedPaths = { "/*" };
+ String[] expectedPaths = {"/*"};
assertEquals(1, pathSpecs.length);
- @Test public void testDefaultRoutes() throws Exception {
+ void testDefaultRoutes() throws Exception {
String baseUrl = baseUrl(app);
- assertEquals("foo", getContent(baseUrl +"test/foo").trim());
- assertEquals("foo", getContent(baseUrl +"test/foo/index").trim());
- assertEquals("bar", getContent(baseUrl +"test/foo/bar").trim());
- assertEquals("default", getContent(baseUrl +"test").trim());
- assertEquals("default", getContent(baseUrl +"test/").trim());
+ assertEquals("foo", getContent(baseUrl + "test/foo").trim());
+ assertEquals("foo", getContent(baseUrl + "test/foo/index").trim());
+ assertEquals("bar", getContent(baseUrl + "test/foo/bar").trim());
+ assertEquals("default", getContent(baseUrl + "test").trim());
+ assertEquals("default", getContent(baseUrl + "test/").trim());
assertEquals("default", getContent(baseUrl).trim());
- @Test public void testCustomRoutes() throws Exception {
+ void testCustomRoutes() throws Exception {
WebApp app =
WebApps.$for("test", TestWebApp.class, this, "ws").start(new WebApp() {
@@ -249,21 +260,22 @@ public class TestWebApp {
assertEquals("foo", getContent(baseUrl).trim());
- assertEquals("foo", getContent(baseUrl +"test").trim());
- assertEquals("foo1", getContent(baseUrl +"test/1").trim());
- assertEquals("bar", getContent(baseUrl +"test/bar/foo").trim());
- assertEquals("default", getContent(baseUrl +"test/foo/bar").trim());
- assertEquals("default1", getContent(baseUrl +"test/foo/1").trim());
- assertEquals("default2", getContent(baseUrl +"test/foo/bar/2").trim());
- assertEquals(404, getResponseCode(baseUrl +"test/goo"));
- assertEquals(200, getResponseCode(baseUrl +"ws/v1/test"));
- assertTrue(getContent(baseUrl +"ws/v1/test").contains("myInfo"));
+ assertEquals("foo", getContent(baseUrl + "test").trim());
+ assertEquals("foo1", getContent(baseUrl + "test/1").trim());
+ assertEquals("bar", getContent(baseUrl + "test/bar/foo").trim());
+ assertEquals("default", getContent(baseUrl + "test/foo/bar").trim());
+ assertEquals("default1", getContent(baseUrl + "test/foo/1").trim());
+ assertEquals("default2", getContent(baseUrl + "test/foo/bar/2").trim());
+ assertEquals(404, getResponseCode(baseUrl + "test/goo"));
+ assertEquals(200, getResponseCode(baseUrl + "ws/v1/test"));
+ assertTrue(getContent(baseUrl + "ws/v1/test").contains("myInfo"));
- @Test public void testEncodedUrl() throws Exception {
+ void testEncodedUrl() throws Exception {
@@ -292,7 +304,8 @@ public class TestWebApp {
- @Test public void testRobotsText() throws Exception {
+ void testRobotsText() throws Exception {
@@ -319,18 +332,20 @@ public class TestWebApp {
// This is to test the GuiceFilter should only be applied to webAppContext,
// not to logContext;
- @Test public void testYARNWebAppContext() throws Exception {
+ void testYARNWebAppContext() throws Exception {
// setting up the log context
System.setProperty("hadoop.log.dir", "/Not/Existing/dir");
WebApp app = WebApps.$for("test", this).start(new WebApp() {
- @Override public void setup() {
+ @Override
+ public void setup() {
route("/", FooController.class);
});
// Not able to access a non-existing dir, should not redirect to foo.
- assertEquals(404, getResponseCode(baseUrl +"logs"));
+ assertEquals(404, getResponseCode(baseUrl + "logs"));
// should be able to redirect to foo.
@@ -345,7 +360,7 @@ public class TestWebApp {
- public void testPortRanges() throws Exception {
+ void testPortRanges() throws Exception {
WebApp app1 = null;
@@ -355,7 +370,7 @@ public class TestWebApp {
WebApp app5 = null;
int port = ServerSocketUtil.waitForPort(48000, 60);
app1 = WebApps.$for("test", this).at(port).start();
assertEquals(port, app1.getListenerAddress().getPort());
app2 = WebApps.$for("test", this).at("0.0.0.0", port, true).start();
@@ -18,9 +18,6 @@
import javax.ws.rs.core.Response.StatusType;
@@ -30,6 +27,9 @@ import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
public class WebServicesTestUtils {
public static long getXmlLong(Element element, String name) {
String val = getXmlString(element, name);
@@ -122,20 +122,18 @@ public class WebServicesTestUtils {
public static void checkStringMatch(String print, String expected, String got) {
assertTrue(
- print + " doesn't match, got: " + got + " expected: " + expected,
- got.matches(expected));
+ got.matches(expected),
+ print + " doesn't match, got: " + got + " expected: " + expected);
public static void checkStringContains(String print, String expected, String got) {
- print + " doesn't contain expected string, got: " + got + " expected: " + expected,
- got.contains(expected));
+ got.contains(expected),
+ print + " doesn't contain expected string, got: " + got + " expected: " + expected);
public static void checkStringEqual(String print, String expected, String got) {
- assertTrue(
- print + " is not equal, got: " + got + " expected: " + expected,
- got.equals(expected));
+ assertEquals(expected, got,
+ print + " is not equal, got: " + got + " expected: " + expected);
public static void assertResponseStatusCode(StatusType expected,
@@ -145,6 +143,6 @@ public class WebServicesTestUtils {
public static void assertResponseStatusCode(String errmsg,
StatusType expected, StatusType actual) {
- assertEquals(errmsg, expected.getStatusCode(), actual.getStatusCode());
+ assertEquals(expected.getStatusCode(), actual.getStatusCode(), errmsg);
@@ -17,19 +17,27 @@
package org.apache.hadoop.yarn.webapp.hamlet2;
-import java.util.EnumSet;
import org.apache.hadoop.yarn.webapp.SubView;
-import static org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.*;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.LinkType;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.Media;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.TABLE;
+import static org.mockito.Mockito.atLeast;
public class TestHamlet {
- @Test public void testHamlet() {
+ void testHamlet() {
Hamlet h = newHamlet().
title("test").
h1("heading 1").
@@ -69,7 +77,8 @@ public class TestHamlet {
verify(out, never()).print("</p>");
- @Test public void testTable() {
+ void testTable() {
title("test table").
link("style.css");
@@ -90,7 +99,8 @@ public class TestHamlet {
verify(out, atLeast(1)).print("</tr>");
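The Hamlet tests render through a spied PrintWriter and then assert on the exact markup fragments that were emitted. A minimal sketch of that spy-and-verify pattern (the printed strings are placeholders):

    import java.io.PrintWriter;

    import static org.mockito.Mockito.atLeast;
    import static org.mockito.Mockito.never;
    import static org.mockito.Mockito.spy;
    import static org.mockito.Mockito.verify;

    class SpyVerifySketch {
      void renderAndVerify() {
        PrintWriter out = spy(new PrintWriter(System.out));
        out.print("<td>");
        out.print("</td>");
        verify(out, atLeast(1)).print("<td>");  // passes: printed once
        verify(out, never()).print("<table>");  // passes: never printed
      }
    }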
- @Test public void testEnumAttrs() {
+ void testEnumAttrs() {
meta_http("Content-type", "text/html; charset=utf-8").
title("test enum attrs").
@@ -109,7 +119,8 @@ public class TestHamlet {
verify(out).print(" rel=\"start index\"");
- @Test public void testScriptStyle() {
+ void testScriptStyle() {
script("a.js").script("b.js").
style("h1 { font-size: 1.2em }");
@@ -121,7 +132,8 @@ public class TestHamlet {
verify(out).print(" type=\"text/css\"");
- @Test public void testPreformatted() {
+ void testPreformatted() {
div().
i("inline before pre").
@@ -144,7 +156,8 @@ public class TestHamlet {
@Override public void renderPartial() {}
- @Test public void testSubViews() {
+ void testSubViews() {
title("test sub-views").
div("#view1").__(TestView1.class).__().
@@ -153,8 +166,8 @@ public class TestHamlet {
PrintWriter out = h.getWriter();
out.flush();
assertEquals(0, h.nestLevel);
- verify(out).print("["+ TestView1.class.getName() +"]");
- verify(out).print("["+ TestView2.class.getName() +"]");
+ verify(out).print("[" + TestView1.class.getName() + "]");
+ verify(out).print("[" + TestView2.class.getName() + "]");
static Hamlet newHamlet() {
@@ -18,19 +18,29 @@
-import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.*;
+import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.CoreAttrs;
+import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.H1;
+import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.LINK;
+import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.SCRIPT;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
public class TestHamletImpl {
* Test the generic implementation methods
* @see TestHamlet for Hamlet syntax
- @Test public void testGeneric() {
+ void testGeneric() {
PrintWriter out = spy(new PrintWriter(System.out));
HamletImpl hi = new HamletImpl(out, 0, false);
hi.
@@ -66,7 +76,8 @@ public class TestHamletImpl {
verify(out, never()).print("</sub4>");
- @Test public void testSetSelector() {
+ void testSetSelector() {
CoreAttrs e = mock(CoreAttrs.class);
HamletImpl.setSelector(e, "#id.class");
@@ -81,7 +92,8 @@ public class TestHamletImpl {
verify(t).__("heading");
- @Test public void testSetLinkHref() {
+ void testSetLinkHref() {
LINK link = mock(LINK.class);
HamletImpl.setLinkHref(link, "uri");
HamletImpl.setLinkHref(link, "style.css");
@@ -93,7 +105,8 @@ public class TestHamletImpl {
verifyNoMoreInteractions(link);
- @Test public void testSetScriptSrc() {
+ void testSetScriptSrc() {
SCRIPT script = mock(SCRIPT.class);
HamletImpl.setScriptSrc(script, "uri");
HamletImpl.setScriptSrc(script, "script.js");
@@ -17,41 +17,51 @@
-import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.*;
import org.apache.hadoop.yarn.webapp.WebAppException;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.S_CLASS;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.S_ID;
+import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.parseSelector;
public class TestParseSelector {
- @Test public void testNormal() {
+ void testNormal() {
String[] res = parseSelector("#id.class");
assertEquals("id", res[S_ID]);
assertEquals("class", res[S_CLASS]);
- @Test public void testMultiClass() {
+ void testMultiClass() {
String[] res = parseSelector("#id.class1.class2");
assertEquals("class1 class2", res[S_CLASS]);
- @Test public void testMissingId() {
+ void testMissingId() {
String[] res = parseSelector(".class");
assertNull(res[S_ID]);
- @Test public void testMissingClass() {
+ void testMissingClass() {
String[] res = parseSelector("#id");
assertNull(res[S_CLASS]);
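Taken together, these cases specify parseSelector: "#id.c1.c2" yields { "id", "c1 c2" }, missing parts stay null, and an empty selector is an error. A hypothetical re-derivation of that contract (the real code throws WebAppException rather than IllegalArgumentException):

    class SelectorSketch {
      static final int S_ID = 0;
      static final int S_CLASS = 1;

      static String[] parseSelector(String selector) {
        if (selector == null || selector.isEmpty()) {
          throw new IllegalArgumentException("empty selector");
        }
        String[] result = {null, null};
        String rest = selector;
        if (rest.startsWith("#")) {
          int dot = rest.indexOf('.');
          result[S_ID] = dot < 0 ? rest.substring(1) : rest.substring(1, dot);
          rest = dot < 0 ? "" : rest.substring(dot);
        }
        if (rest.startsWith(".")) {
          // Multiple classes collapse into one space-separated attribute.
          result[S_CLASS] = rest.substring(1).replace('.', ' ');
        }
        return result;
      }
    }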
- @Test(expected=WebAppException.class) public void testMissingAll() {
- parseSelector("");
+ @Test
+ void testMissingAll() {
+ assertThrows(WebAppException.class, () -> parseSelector(""));
+ }
@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.webapp.log;
@@ -18,23 +18,26 @@
package org.apache.hadoop.yarn.webapp.test;
-import com.google.inject.AbstractModule;
-import com.google.inject.Injector;
-import com.google.inject.servlet.RequestScoped;
+import com.google.inject.AbstractModule;
+import com.google.inject.Injector;
+import com.google.inject.servlet.RequestScoped;
public class TestWebAppTests {
static final Logger LOG = LoggerFactory.getLogger(TestWebAppTests.class);
- @Test public void testInstances() throws Exception {
+ void testInstances() throws Exception {
HttpServletRequest req = injector.getInstance(HttpServletRequest.class);
HttpServletResponse res = injector.getInstance(HttpServletResponse.class);
@@ -61,24 +64,27 @@ public class TestWebAppTests {
static class FooBar extends Bar {
- @Test public void testCreateInjector() throws Exception {
+ void testCreateInjector() throws Exception {
Bar bar = new Bar();
Injector injector = WebAppTests.createMockInjector(Foo.class, bar);
logInstances(injector.getInstance(HttpServletRequest.class),
- injector.getInstance(HttpServletResponse.class),
- injector.getInstance(HttpServletResponse.class).getWriter());
+ injector.getInstance(HttpServletResponse.class),
+ injector.getInstance(HttpServletResponse.class).getWriter());
assertSame(bar, injector.getInstance(Foo.class));
- @Test public void testCreateInjector2() {
+ void testCreateInjector2() {
final FooBar foobar = new FooBar();
Injector injector = WebAppTests.createMockInjector(Foo.class, bar,
new AbstractModule() {
- @Override protected void configure() {
- bind(Bar.class).toInstance(foobar);
+ @Override
+ protected void configure() {
+ bind(Bar.class).toInstance(foobar);
assertNotSame(bar, injector.getInstance(Bar.class));
assertSame(foobar, injector.getInstance(Bar.class));
@@ -87,11 +93,12 @@ public class TestWebAppTests {
static class ScopeTest {
- @Test public void testRequestScope() {
+ void testRequestScope() {
assertSame(injector.getInstance(ScopeTest.class),
- injector.getInstance(ScopeTest.class));
+ injector.getInstance(ScopeTest.class));
private void logInstances(HttpServletRequest req, HttpServletResponse res,
@@ -18,30 +18,29 @@
-import org.apache.hadoop.yarn.webapp.Controller;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.View;
+import java.io.PrintWriter;
import java.lang.reflect.Method;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
-import com.google.inject.Module;
-import com.google.inject.Scopes;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
+import com.google.inject.Module;
import com.google.inject.Provides;
+import com.google.inject.Scopes;
-import java.io.PrintWriter;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import org.apache.hadoop.yarn.webapp.Controller;
+import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.View;
+import org.apache.hadoop.yarn.webapp.WebAppException;
public class WebAppTests {
@@ -18,14 +18,18 @@
package org.apache.hadoop.yarn.webapp.util;
-import static org.junit.Assert.assertArrayEquals;
import java.net.UnknownHostException;
import org.apache.hadoop.http.HttpServer2;
@@ -35,13 +39,10 @@ import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
public class TestWebAppUtils {
private static final String RM1_NODE_ID = "rm1";
@@ -53,7 +54,7 @@ public class TestWebAppUtils {
private static final String anyIpAddress = "1.2.3.4";
private static Map<String, String> savedStaticResolution = new HashMap<>();
public static void initializeDummyHostnameResolution() throws Exception {
String previousIpAddress;
for (String hostName : dummyHostNames) {
@@ -64,7 +65,7 @@ public class TestWebAppUtils {
public static void restoreDummyHostnameResolution() throws Exception {
for (Map.Entry<String, String> hostnameToIpEntry : savedStaticResolution.entrySet()) {
NetUtils.addStaticResolution(hostnameToIpEntry.getKey(), hostnameToIpEntry.getValue());
@@ -72,7 +73,7 @@ public class TestWebAppUtils {
- public void TestRMWebAppURLRemoteAndLocal() throws UnknownHostException {
+ void TestRMWebAppURLRemoteAndLocal() throws UnknownHostException {
Configuration configuration = new Configuration();
final String rmAddress = "host1:8088";
configuration.set(YarnConfiguration.RM_WEBAPP_ADDRESS, rmAddress);
@@ -84,30 +85,32 @@ public class TestWebAppUtils {
configuration.set(YarnConfiguration.RM_HA_IDS, RM1_NODE_ID + "," + RM2_NODE_ID);
String rmRemoteUrl = WebAppUtils.getResolvedRemoteRMWebAppURLWithoutScheme(configuration);
- Assert.assertEquals("ResolvedRemoteRMWebAppUrl should resolve to the first HA RM address", rm1Address, rmRemoteUrl);
+ assertEquals(rm1Address, rmRemoteUrl,
+ "ResolvedRemoteRMWebAppUrl should resolve to the first HA RM address");
String rmLocalUrl = WebAppUtils.getResolvedRMWebAppURLWithoutScheme(configuration);
- Assert.assertEquals("ResolvedRMWebAppUrl should resolve to the default RM webapp address", rmAddress, rmLocalUrl);
+ assertEquals(rmAddress, rmLocalUrl,
+ "ResolvedRMWebAppUrl should resolve to the default RM webapp address");
- public void testGetPassword() throws Exception {
+ void testGetPassword() throws Exception {
Configuration conf = provisionCredentialsForSSL();
// use WebAppUtils as would be used by loadSslConfiguration
- Assert.assertEquals("keypass",
+ assertEquals("keypass",
WebAppUtils.getPassword(conf, WebAppUtils.WEB_APP_KEY_PASSWORD_KEY));
- Assert.assertEquals("storepass",
+ assertEquals("storepass",
WebAppUtils.getPassword(conf, WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY));
- Assert.assertEquals("trustpass",
+ assertEquals("trustpass",
WebAppUtils.getPassword(conf, WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY));
// let's make sure that a password that doesn't exist returns null
- Assert.assertEquals(null, WebAppUtils.getPassword(conf,"invalid-alias"));
+ assertNull(WebAppUtils.getPassword(conf, "invalid-alias"));
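The test above reads SSL passwords through a Hadoop credential provider rather than plaintext config. A minimal sketch of how such aliases get provisioned and resolved, assuming a throwaway JavaKeyStoreProvider at a hypothetical path:

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProvider;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;
    import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

    class CredentialSketch {
      static void provision(Configuration conf) throws Exception {
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
            "jceks://file/tmp/test.jceks"); // hypothetical keystore location
        List<CredentialProvider> providers =
            CredentialProviderFactory.getProviders(conf);
        CredentialProvider provider = providers.get(0);
        provider.createCredentialEntry(
            WebAppUtils.WEB_APP_KEY_PASSWORD_KEY, "keypass".toCharArray());
        provider.flush();
        // getPassword resolves the alias, or returns null if absent.
        System.out.println(
            WebAppUtils.getPassword(conf, WebAppUtils.WEB_APP_KEY_PASSWORD_KEY));
      }
    }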
- public void testLoadSslConfiguration() throws Exception {
+ void testLoadSslConfiguration() throws Exception {
TestBuilder builder = (TestBuilder) new TestBuilder();
@@ -116,12 +119,12 @@ public class TestWebAppUtils {
String keypass = "keypass";
String storepass = "storepass";
- String trustpass = "trustpass";
+ String trustpass = "trustpass";
// make sure we get the right passwords in the builder
- assertEquals(keypass, ((TestBuilder)builder).keypass);
- assertEquals(storepass, ((TestBuilder)builder).keystorePassword);
- assertEquals(trustpass, ((TestBuilder)builder).truststorePassword);
+ assertEquals(keypass, ((TestBuilder) builder).keypass);
+ assertEquals(storepass, ((TestBuilder) builder).keystorePassword);
+ assertEquals(trustpass, ((TestBuilder) builder).truststorePassword);
protected Configuration provisionCredentialsForSSL() throws IOException,
@@ -145,11 +148,11 @@ public class TestWebAppUtils {
char[] trustpass = {'t', 'r', 'u', 's', 't', 'p', 'a', 's', 's'};
// ensure that we get nulls when the key isn't there
- assertEquals(null, provider.getCredentialEntry(
+ assertNull(provider.getCredentialEntry(
WebAppUtils.WEB_APP_KEY_PASSWORD_KEY));
WebAppUtils.WEB_APP_KEYSTORE_PASSWORD_KEY));
WebAppUtils.WEB_APP_TRUSTSTORE_PASSWORD_KEY));
// create new aliases
@@ -180,7 +183,7 @@ public class TestWebAppUtils {
- public void testAppendQueryParams() throws Exception {
+ void testAppendQueryParams() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
String targetUri = "/test/path";
Mockito.when(request.getCharacterEncoding()).thenReturn(null);
@@ -194,12 +197,12 @@ public class TestWebAppUtils {
for (Map.Entry<String, String> entry : paramResultMap.entrySet()) {
Mockito.when(request.getQueryString()).thenReturn(entry.getKey());
String uri = WebAppUtils.appendQueryParams(request, targetUri);
- Assert.assertEquals(entry.getValue(), uri);
+ assertEquals(entry.getValue(), uri);
- public void testGetHtmlEscapedURIWithQueryString() throws Exception {
+ void testGetHtmlEscapedURIWithQueryString() throws Exception {
@@ -214,7 +217,7 @@ public class TestWebAppUtils {
String uri = WebAppUtils.getHtmlEscapedURIWithQueryString(request);
@@ -22,6 +22,8 @@ import java.net.HttpURLConnection;
@@ -29,8 +31,9 @@ import org.apache.hadoop.http.TestHttpServer.EchoServlet;
public class TestWebServiceClient {
@@ -43,17 +46,17 @@ public class TestWebServiceClient {
static final String SERVLET_PATH_ECHO = "/" + SERVLET_NAME_ECHO;
- public void testGetWebServiceClient() throws Exception {
+ void testGetWebServiceClient() throws Exception {
conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, "HTTPS_ONLY");
WebServiceClient.initialize(conf);
WebServiceClient client = WebServiceClient.getWebServiceClient();
- Assert.assertNotNull(client.getSSLFactory());
+ assertNotNull(client.getSSLFactory());
WebServiceClient.destroy();
- public void testCreateClient() throws Exception {
+ void testCreateClient() throws Exception {
File base = new File(BASEDIR);
@@ -91,7 +94,7 @@ public class TestWebServiceClient {
HttpURLConnection conn = client.getHttpURLConnectionFactory()
.getHttpURLConnection(u);
- Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+ assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
FileUtil.fullyDelete(new File(BASEDIR));
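These tests exercise the WebServiceClient lifecycle: initialize wires up the SSLFactory from the HTTPS policy, and destroy tears it down. A usage sketch of that flow under the same assumptions (the URL is a placeholder):

    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.webapp.util.WebServiceClient;

    class WebServiceClientSketch {
      static void fetch() throws Exception {
        Configuration conf = new Configuration();
        conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, "HTTPS_ONLY");
        WebServiceClient.initialize(conf); // sets up the SSLFactory
        try {
          WebServiceClient client = WebServiceClient.getWebServiceClient();
          HttpURLConnection conn = client.getHttpURLConnectionFactory()
              .getHttpURLConnection(new URL("https://localhost:8088/"));
          System.out.println(conn.getResponseCode());
        } finally {
          WebServiceClient.destroy();
        }
      }
    }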
@@ -19,36 +19,36 @@
package org.apache.hadoop.yarn.webapp.view;
import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
-import org.apache.hadoop.yarn.webapp.view.ErrorPage;
-import org.apache.hadoop.yarn.webapp.view.FooterBlock;
-import org.apache.hadoop.yarn.webapp.view.HeaderBlock;
-import org.apache.hadoop.yarn.webapp.view.JQueryUI;
public class TestCommonViews {
- @Test public void testErrorPage() {
+ void testErrorPage() {
Injector injector = WebAppTests.testPage(ErrorPage.class);
- @Test public void testHeaderBlock() {
+ void testHeaderBlock() {
WebAppTests.testBlock(HeaderBlock.class);
- @Test public void testFooterBlock() {
+ void testFooterBlock() {
WebAppTests.testBlock(FooterBlock.class);
- @Test public void testJQueryUI() {
+ void testJQueryUI() {
WebAppTests.testBlock(JQueryUI.class);
- @Test public void testInfoBlock() {
+ void testInfoBlock() {
ResponseInfo info = injector.getInstance(ResponseInfo.class);