
Commit 4dc7db6

Merge branch 'trunk' into HADOOP-19425-PART3
2 parents 64468f4 + c21f9bd

164 files changed: +4439 / -2144 lines

LICENSE.txt

Lines changed: 1 addition & 0 deletions
@@ -210,6 +210,7 @@ See licenses/ for text of these licenses.
 Apache Software Foundation License 2.0
 --------------------------------------
 
+hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/nvd3-1.8.5.* (css and js files)
 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/AbstractFuture.java
 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/TimeoutFuture.java

hadoop-client-modules/hadoop-client-integration-tests/pom.xml

Lines changed: 0 additions & 1 deletion
@@ -164,7 +164,6 @@
             <goal>verify</goal>
           </goals>
           <configuration>
-            <trimStackTrace>false</trimStackTrace>
           </configuration>
         </execution>
       </executions>

hadoop-cloud-storage-project/hadoop-cos/src/test/java/org/apache/hadoop/fs/cosn/CosNTestUtils.java

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.junit.internal.AssumptionViolatedException;
+import org.opentest4j.TestAbortedException;
 
 /**
  * Utilities for the CosN tests.
@@ -52,7 +52,7 @@ public static CosNFileSystem createTestFileSystem(
       testUri = URI.create(fsName);
       liveTest = testUri.getScheme().equals(CosNFileSystem.SCHEME);
     } else {
-      throw new AssumptionViolatedException("no test file system in " +
+      throw new TestAbortedException("no test file system in " +
          fsName);
     }
 

hadoop-cloud-storage-project/hadoop-huaweicloud/src/test/java/org/apache/hadoop/fs/obs/OBSTestUtils.java

Lines changed: 4 additions & 4 deletions
@@ -21,7 +21,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.junit.internal.AssumptionViolatedException;
+import org.opentest4j.TestAbortedException;
 
 import java.io.IOException;
 import java.net.URI;
@@ -45,7 +45,7 @@ public final class OBSTestUtils {
    * @param conf configuration
    * @return the FS
    * @throws IOException IO Problems
-   * @throws AssumptionViolatedException if the FS is not named
+   * @throws TestAbortedException if the FS is not named
    */
   public static OBSFileSystem createTestFileSystem(Configuration conf)
       throws IOException {
@@ -62,7 +62,7 @@ public static OBSFileSystem createTestFileSystem(Configuration conf)
    * @param purge flag to enable Multipart purging
    * @return the FS
    * @throws IOException IO Problems
-   * @throws AssumptionViolatedException if the FS is not named
+   * @throws TestAbortedException if the FS is not named
    */
   @SuppressWarnings("deprecation")
   public static OBSFileSystem createTestFileSystem(Configuration conf,
@@ -80,7 +80,7 @@ public static OBSFileSystem createTestFileSystem(Configuration conf,
     if (!liveTest) {
       // This doesn't work with our JUnit 3 style test cases, so instead we'll
       // make this whole class not run by default
-      throw new AssumptionViolatedException(
+      throw new TestAbortedException(
           "No test filesystem in " + TEST_FS_OBS_NAME);
     }
     OBSFileSystem fs1 = new OBSFileSystem();

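The CosN and OBS changes above follow one pattern: JUnit 4's org.junit.internal.AssumptionViolatedException is swapped for org.opentest4j.TestAbortedException, which the JUnit 5 platform treats as an aborted (skipped) test rather than a failure. The sketch below is a minimal, hypothetical illustration of that skip-on-missing-configuration pattern; the class and method names are not from the commit.

import java.net.URI;

import org.opentest4j.TestAbortedException;

/**
 * Hypothetical helper illustrating the skip-on-missing-configuration
 * pattern used by CosNTestUtils and OBSTestUtils after the migration.
 */
public final class ExampleTestUtils {

  private ExampleTestUtils() {
  }

  /**
   * Abort (skip) the calling test when no test file system is configured.
   * On the JUnit 5 platform, throwing TestAbortedException marks the test
   * as skipped, just as AssumptionViolatedException did on JUnit 4.
   */
  public static URI requireTestFileSystem(String fsName) {
    if (fsName == null || fsName.isEmpty()) {
      throw new TestAbortedException("no test file system configured");
    }
    return URI.create(fsName);
  }
}

Calling org.junit.jupiter.api.Assumptions.assumeTrue(...) has the same effect; it throws TestAbortedException internally.
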
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LimitInputStream.java

Lines changed: 9 additions & 12 deletions
@@ -1,20 +1,17 @@
 /*
+ * Copyright (C) 2007 The Guava Authors
  *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
 */
 
 package org.apache.hadoop.util;

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractCreateTest.java

Lines changed: 2 additions & 2 deletions
@@ -28,7 +28,7 @@
 import org.apache.hadoop.fs.StreamCapabilities;
 
 import org.junit.jupiter.api.Test;
-import org.junit.AssumptionViolatedException;
+import org.opentest4j.TestAbortedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -172,7 +172,7 @@ private void testOverwriteNonEmptyDirectory(boolean useBuilder)
     } catch (AssertionError failure) {
       if (isSupported(CREATE_OVERWRITES_DIRECTORY)) {
         // file/directory hack surfaces here
-        throw new AssumptionViolatedException(failure.toString(), failure);
+        throw new TestAbortedException(failure.toString(), failure);
       }
       // else: rethrow
       throw failure;

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractFSContractTestBase.java

Lines changed: 2 additions & 2 deletions
@@ -28,8 +28,8 @@
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Timeout;
-import org.junit.AssumptionViolatedException;
 import org.junit.jupiter.api.extension.RegisterExtension;
+import org.opentest4j.TestAbortedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -149,7 +149,7 @@ protected boolean isSupported(String feature) throws IOException {
    */
   protected void assumeEnabled() {
     if (!contract.isEnabled())
-      throw new AssumptionViolatedException("test cases disabled for " + contract);
+      throw new TestAbortedException("test cases disabled for " + contract);
   }
 
   /**

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java

Lines changed: 7 additions & 7 deletions
@@ -37,7 +37,7 @@
 import org.apache.hadoop.util.functional.FutureIO;
 
 import org.junit.jupiter.api.Assertions;
-import org.junit.AssumptionViolatedException;
+import org.opentest4j.TestAbortedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -565,19 +565,19 @@ public static void noteAction(String action) {
    * exception for the Junit test runner to mark as failed.
    * @param message text message
    * @param failure what failed
-   * @throws AssumptionViolatedException always
+   * @throws TestAbortedException always
    */
   public static void downgrade(String message, Throwable failure) {
     LOG.warn("Downgrading test " + message, failure);
-    AssumptionViolatedException ave =
-        new AssumptionViolatedException(failure, null);
+    TestAbortedException ave =
+        new TestAbortedException(null, failure);
     throw ave;
   }
 
   /**
    * report an overridden test as unsupported.
    * @param message message to use in the text
-   * @throws AssumptionViolatedException always
+   * @throws TestAbortedException always
    */
   public static void unsupported(String message) {
     skip(message);
@@ -586,11 +586,11 @@ public static void unsupported(String message) {
   /**
    * report a test has been skipped for some reason.
    * @param message message to use in the text
-   * @throws AssumptionViolatedException always
+   * @throws TestAbortedException always
   */
   public static void skip(String message) {
     LOG.info("Skipping: {}", message);
-    throw new AssumptionViolatedException(message);
+    throw new TestAbortedException(message);
   }
 
   /**

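One detail in the downgrade() change is worth calling out: the arguments flip because the two exceptions have different constructors. The old call matched JUnit 4's AssumptionViolatedException(Object value, Matcher matcher), with the failure first; opentest4j's TestAbortedException(String message, Throwable cause) takes the cause second. A minimal, hypothetical sketch of the new form (the class name is illustrative only):

import org.opentest4j.TestAbortedException;

/** Sketch of the argument order in the opentest4j constructor. */
public class ConstructorOrderExample {
  public static void main(String[] args) {
    Throwable failure = new AssertionError("original failure");

    // Old (JUnit 4): new AssumptionViolatedException(failure, null)
    //   matched AssumptionViolatedException(Object value, Matcher<?> matcher).
    // New (opentest4j): message first, cause second.
    TestAbortedException aborted = new TestAbortedException(null, failure);

    System.out.println(aborted.getCause()); // prints the wrapped AssertionError
  }
}

Passing the original failure as the cause keeps it visible in the skipped-test report even when the message is null.
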
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java

Lines changed: 3 additions & 3 deletions
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.test;
 
-import org.junit.AssumptionViolatedException;
+import org.opentest4j.TestAbortedException;
 
 /**
  * JUnit assumptions for the environment (OS).
@@ -34,13 +34,13 @@ public static void assumeNotWindows() {
 
   public static void assumeNotWindows(String message) {
     if (WINDOWS) {
-      throw new AssumptionViolatedException(message);
+      throw new TestAbortedException(message);
     }
   }
 
   public static void assumeWindows() {
     if (!WINDOWS) {
-      throw new AssumptionViolatedException(
+      throw new TestAbortedException(
          "Expected Windows platform but got " + OS_NAME);
     }
   }

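Because the JUnit 5 engine recognizes TestAbortedException, callers of these PlatformAssumptions helpers need no changes: a test that calls assumeWindows() on a non-Windows host is reported as skipped, not failed. A small, hypothetical usage sketch (assumes the Hadoop test utilities and JUnit 5 are on the classpath):

import org.junit.jupiter.api.Test;

import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.jupiter.api.Assertions.assertTrue;

/** Hypothetical test showing how the migrated helper behaves on JUnit 5. */
public class ExampleWindowsOnlyTest {

  @Test
  public void testWindowsSpecificBehaviour() {
    // On a non-Windows host this throws TestAbortedException and the
    // JUnit 5 engine marks the test as skipped rather than failed.
    assumeWindows();

    assertTrue(System.getProperty("os.name").startsWith("Windows"));
  }
}
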
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/AdminStatesBaseTest.java

Lines changed: 16 additions & 18 deletions
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hdfs;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -28,12 +28,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.util.Lists;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Timeout;
+import org.junit.jupiter.api.io.TempDir;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -51,12 +49,13 @@
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.hdfs.util.HostsFileWriter;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * This class provide utilities for testing of the admin operations of nodes.
  */
+@Timeout(600)
 public class AdminStatesBaseTest {
   public static final Logger LOG =
       LoggerFactory.getLogger(AdminStatesBaseTest.class);
@@ -69,10 +68,9 @@ public class AdminStatesBaseTest {
 
   final private Random myrand = new Random();
 
-  @Rule
-  public TemporaryFolder baseDir = new TemporaryFolder();
-  @Rule
-  public Timeout timeout = new Timeout(600, TimeUnit.SECONDS);
+  @SuppressWarnings("checkstyle:VisibilityModifier")
+  @TempDir
+  public java.nio.file.Path baseDir;
 
   private HostsFileWriter hostsFileWriter;
   private Configuration conf;
@@ -91,7 +89,7 @@ protected MiniDFSCluster getCluster() {
     return cluster;
   }
 
-  @Before
+  @BeforeEach
   public void setup() throws IOException {
     // Set up the hosts/exclude files.
     hostsFileWriter = new HostsFileWriter();
@@ -118,7 +116,7 @@ public void setup() throws IOException {
 
   }
 
-  @After
+  @AfterEach
   public void teardown() throws IOException {
     hostsFileWriter.cleanup();
     shutdownCluster();
@@ -391,7 +389,7 @@ protected DFSClient getDfsClient(final int nnIndex) throws IOException {
   protected static void validateCluster(DFSClient client, int numDNs)
       throws IOException {
     DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
-    assertEquals("Number of Datanodes ", numDNs, info.length);
+    assertEquals(numDNs, info.length, "Number of Datanodes ");
   }
 
   /** Start a MiniDFSCluster.
@@ -406,7 +404,7 @@ protected void startCluster(int numNameNodes, int numDatanodes,
   protected void startCluster(int numNameNodes, int numDatanodes,
       boolean setupHostsFile, long[] nodesCapacity,
      boolean checkDataNodeHostConfig, boolean federation) throws IOException {
-    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf, baseDir.getRoot())
+    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf, baseDir.toFile())
        .numDataNodes(numDatanodes);
     if (federation) {
       builder.nnTopology(
@@ -441,7 +439,7 @@ protected void startSimpleCluster(int numNameNodes, int numDatanodes)
 
 
   protected void startSimpleHACluster(int numDatanodes) throws IOException {
-    cluster = new MiniDFSCluster.Builder(conf, baseDir.getRoot())
+    cluster = new MiniDFSCluster.Builder(conf, baseDir.toFile())
        .nnTopology(MiniDFSNNTopology.simpleHATopology()).numDataNodes(
        numDatanodes).build();
     cluster.transitionToActive(0);

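The AdminStatesBaseTest change bundles the common JUnit 4 to JUnit 5 conversions seen throughout this merge: a @Rule TemporaryFolder becomes a @TempDir-injected java.nio.file.Path, the Timeout rule becomes a class-level @Timeout(600), @Before/@After become @BeforeEach/@AfterEach, and assertEquals moves the failure message to the last parameter. A condensed, hypothetical sketch of the same pattern (names are illustrative, not from the commit):

import java.nio.file.Path;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.io.TempDir;

import static org.junit.jupiter.api.Assertions.assertEquals;

/** Hypothetical condensed example of the JUnit 5 idioms adopted above. */
@Timeout(600)                  // replaces: @Rule Timeout(600, TimeUnit.SECONDS)
public class ExampleMigratedTest {

  @TempDir                     // replaces: @Rule TemporaryFolder baseDir
  public Path baseDir;

  @BeforeEach                  // replaces: @Before
  public void setup() {
    // per-test setup; use baseDir.toFile() where a java.io.File is required
  }

  @AfterEach                   // replaces: @After
  public void teardown() {
    // per-test cleanup
  }

  @Test
  public void testSomething() {
    // JUnit 5 assertions take the failure message as the *last* argument.
    assertEquals(3, 1 + 2, "arithmetic should hold");
  }
}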