Skip to content

Commit e95e230

Browse files
committed
Merge branch 'trunk' into HADOOP-19406_UDSSupport
2 parents 6651d38 + 7c4e8a2 commit e95e230

File tree

96 files changed

+2280
-1258
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

96 files changed

+2280
-1258
lines changed

BUILDING.txt

+4-3
Original file line numberDiff line numberDiff line change
@@ -635,11 +635,12 @@ hadoop-dist/target upon successful build. Run these commands from an
635635
"x64 Native Tools Command Prompt for VS 2019" which can be found under "Visual Studio 2019" in the
636636
Windows start menu. If you're using the Docker image from Dockerfile_windows_10, you'll be
637637
logged into "x64 Native Tools Command Prompt for VS 2019" automatically when you start the
638-
container.
638+
container. The Docker image does not have a full VS install, so you need to add the
639+
-Dskip.platformToolsetDetection option (already included below in the examples).
639640

640641
> set classpath=
641642
> set PROTOBUF_HOME=C:\vcpkg\installed\x64-windows
642-
> mvn clean package -Dhttps.protocols=TLSv1.2 -DskipTests -DskipDocs -Pnative-win,dist^
643+
> mvn clean package -Dhttps.protocols=TLSv1.2 -DskipTests -DskipDocs -Pnative-win,dist -Dskip.platformToolsetDetection^
643644
-Drequire.openssl -Drequire.test.libhadoop -Pyarn-ui -Dshell-executable=C:\Git\bin\bash.exe^
644645
-Dtar -Dopenssl.prefix=C:\vcpkg\installed\x64-windows^
645646
-Dcmake.prefix.path=C:\vcpkg\installed\x64-windows^
@@ -651,7 +652,7 @@ Assuming that we're still running in the Docker container hadoop-windows-10-buil
651652
following command to create the Apache Hadoop release tarball -
652653

653654
> set IS_WINDOWS=1
654-
> set MVN_ARGS="-Dshell-executable=C:\Git\bin\bash.exe -Dhttps.protocols=TLSv1.2 -Pnative-win -Drequire.openssl -Dopenssl.prefix=C:\vcpkg\installed\x64-windows -Dcmake.prefix.path=C:\vcpkg\installed\x64-windows -Dwindows.cmake.toolchain.file=C:\vcpkg\scripts\buildsystems\vcpkg.cmake -Dwindows.cmake.build.type=RelWithDebInfo -Dwindows.build.hdfspp.dll=off -Duse.platformToolsetVersion=v142 -Dwindows.no.sasl=on -DskipTests -DskipDocs -Drequire.test.libhadoop"
655+
> set MVN_ARGS="-Dshell-executable=C:\Git\bin\bash.exe -Dhttps.protocols=TLSv1.2 -Pnative-win -Dskip.platformToolsetDetection -Drequire.openssl -Dopenssl.prefix=C:\vcpkg\installed\x64-windows -Dcmake.prefix.path=C:\vcpkg\installed\x64-windows -Dwindows.cmake.toolchain.file=C:\vcpkg\scripts\buildsystems\vcpkg.cmake -Dwindows.cmake.build.type=RelWithDebInfo -Dwindows.build.hdfspp.dll=off -Duse.platformToolsetVersion=v142 -Dwindows.no.sasl=on -DskipTests -DskipDocs -Drequire.test.libhadoop"
655656
> C:\Git\bin\bash.exe C:\hadoop\dev-support\bin\create-release --mvnargs=%MVN_ARGS%
656657

657658
Note:

LICENSE-binary

+7-9
Original file line numberDiff line numberDiff line change
@@ -218,12 +218,12 @@ com.aliyun:aliyun-java-sdk-sts:3.0.0
218218
com.aliyun.oss:aliyun-sdk-oss:3.13.2
219219
com.cedarsoftware:java-util:1.9.0
220220
com.cedarsoftware:json-io:2.5.1
221-
com.fasterxml.jackson.core:jackson-annotations:2.12.7
222-
com.fasterxml.jackson.core:jackson-core:2.12.7
223-
com.fasterxml.jackson.core:jackson-databind:2.12.7.1
224-
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.12.7
225-
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.12.7
226-
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.12.7
221+
com.fasterxml.jackson.core:jackson-annotations:2.14.3
222+
com.fasterxml.jackson.core:jackson-core:2.14.3
223+
com.fasterxml.jackson.core:jackson-databind:2.14.3
224+
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.14.3
225+
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.14.3
226+
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.14.3
227227
com.fasterxml.uuid:java-uuid-generator:3.1.4
228228
com.fasterxml.woodstox:woodstox-core:5.4.0
229229
com.github.ben-manes.caffeine:caffeine:2.9.3
@@ -298,7 +298,6 @@ javax.inject:javax.inject:1
298298
net.java.dev.jna:jna:5.2.0
299299
net.minidev:accessors-smart:1.2
300300
org.apache.avro:avro:1.11.4
301-
org.apache.avro:avro:1.11.3
302301
org.apache.commons:commons-compress:1.26.1
303302
org.apache.commons:commons-configuration2:2.10.1
304303
org.apache.commons:commons-csv:1.9.0
@@ -505,8 +504,7 @@ javax.cache:cache-api:1.1.1
505504
javax.servlet:javax.servlet-api:3.1.0
506505
javax.servlet.jsp:jsp-api:2.1
507506
javax.websocket:javax.websocket-api:1.0
508-
javax.ws.rs:jsr311-api:1.1.1
509-
javax.xml.bind:jaxb-api:2.2.11
507+
javax.xml.bind:jaxb-api:2.3.1
510508

511509
Eclipse Distribution License (EDL) 1.0
512510
--------------------------

dev-support/docker/Dockerfile_windows_10

+5-12
Original file line numberDiff line numberDiff line change
@@ -49,20 +49,13 @@ RUN powershell Copy-Item -Recurse -Path 'C:\Program Files\Git' -Destination C:\G
4949
# hadolint ignore=DL3003
5050
RUN powershell git clone https://github.com/microsoft/vcpkg.git \
5151
&& cd vcpkg \
52-
&& git checkout 7ffa425e1db8b0c3edf9c50f2f3a0f25a324541d \
52+
&& git fetch --all \
53+
&& git checkout 2025.03.19 \
5354
&& .\bootstrap-vcpkg.bat
5455

55-
# Fix wrong download link for 7-zip in .\vcpkg\scripts\vcpkgTools.xml:
56-
# - https://www.nuget.org/api/v2/package/7-Zip.CommandLine/18.1.0 (not working anymore) is replaced with
57-
# - https://globalcdn.nuget.org/packages/7-zip.commandline.18.1.0.nupkg?packageVersion=18.1.0 (working)
58-
# Replacing is done using the Linux tool "sed".
59-
RUN choco install sed -y
60-
RUN sed -i "s,https://www.nuget.org/api/v2/package/7-Zip.CommandLine/18.1.0,https://globalcdn.nuget.org/packages/7-zip.commandline.18.1.0.nupkg?packageVersion=18.1.0,g" .\vcpkg\scripts\vcpkgTools.xml
61-
62-
RUN powershell .\vcpkg\vcpkg.exe install boost:x64-windows
63-
RUN powershell .\vcpkg\vcpkg.exe install protobuf:x64-windows
64-
RUN powershell .\vcpkg\vcpkg.exe install openssl:x64-windows
65-
RUN powershell .\vcpkg\vcpkg.exe install zlib:x64-windows
56+
ADD vcpkg/vcpkg.json .
57+
58+
RUN powershell .\vcpkg\vcpkg.exe install --x-install-root .\vcpkg\installed
6659

6760
# Install Azul Java 8 JDK.
6861
RUN powershell Invoke-WebRequest -URI https://cdn.azul.com/zulu/bin/zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -OutFile $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip

dev-support/docker/vcpkg/vcpkg.json

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
{
2+
"$schema": "https://raw.githubusercontent.com/microsoft/vcpkg-tool/main/docs/vcpkg.schema.json",
3+
"dependencies": [
4+
"boost",
5+
"protobuf",
6+
"openssl",
7+
"zlib"
8+
],
9+
"builtin-baseline": "289a69379604112a433874fe8b9812dad3103341",
10+
"overrides": [
11+
{
12+
"name": "protobuf",
13+
"version": "3.21.12"
14+
}
15+
]
16+
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -162,7 +162,7 @@ public boolean moveToTrash(Path path) throws IOException {
162162
LOG.warn("Can't create(mkdir) trash directory: " + baseTrashPath);
163163
return false;
164164
}
165-
} catch (FileAlreadyExistsException e) {
165+
} catch (FileAlreadyExistsException | ParentNotDirectoryException e) {
166166
// find the path which is not a directory, and modify baseTrashPath
167167
// & trashPath, then mkdirs
168168
Path existsFilePath = baseTrashPath;

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -415,7 +415,7 @@ public void setResponse(Message message) {
415415
long deltaNanos = Time.monotonicNowNanos() - call.getStartHandleTimestampNanos();
416416
updateProcessingDetails(call, deltaNanos);
417417
call.setDeferredResponse(RpcWritable.wrap(message));
418-
server.updateDeferredMetrics(call, methodName, deltaNanos);
418+
server.updateDeferredMetrics(call, methodName);
419419
}
420420

421421
@Override
@@ -424,7 +424,7 @@ public void error(Throwable t) {
424424
updateProcessingDetails(call, deltaNanos);
425425
call.setDeferredError(t);
426426
String detailedMetricsName = t.getClass().getSimpleName();
427-
server.updateDeferredMetrics(call, detailedMetricsName, deltaNanos);
427+
server.updateDeferredMetrics(call, detailedMetricsName);
428428
}
429429
}
430430

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -447,7 +447,7 @@ public void setResponse(Message message) {
447447
long deltaNanos = Time.monotonicNowNanos() - call.getStartHandleTimestampNanos();
448448
updateProcessingDetails(call, deltaNanos);
449449
call.setDeferredResponse(RpcWritable.wrap(message));
450-
server.updateDeferredMetrics(call, methodName, deltaNanos);
450+
server.updateDeferredMetrics(call, methodName);
451451
}
452452

453453
@Override
@@ -456,7 +456,7 @@ public void error(Throwable t) {
456456
updateProcessingDetails(call, deltaNanos);
457457
call.setDeferredError(t);
458458
String detailedMetricsName = t.getClass().getSimpleName();
459-
server.updateDeferredMetrics(call, detailedMetricsName, deltaNanos);
459+
server.updateDeferredMetrics(call, detailedMetricsName);
460460
}
461461
}
462462

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

+3-2
Original file line numberDiff line numberDiff line change
@@ -673,9 +673,8 @@ void updateMetrics(Call call, long processingStartTimeNanos, boolean connDropped
673673
* Update rpc metrics for deferred calls.
674674
* @param call The Rpc Call
675675
* @param name Rpc method name
676-
* @param processingTime processing call in ms unit.
677676
*/
678-
void updateDeferredMetrics(Call call, String name, long processingTime) {
677+
void updateDeferredMetrics(Call call, String name) {
679678
long completionTimeNanos = Time.monotonicNowNanos();
680679
long arrivalTimeNanos = call.timestampNanos;
681680

@@ -684,6 +683,8 @@ void updateDeferredMetrics(Call call, String name, long processingTime) {
684683
details.get(Timing.LOCKWAIT, rpcMetrics.getMetricsTimeUnit());
685684
long responseTime =
686685
details.get(Timing.RESPONSE, rpcMetrics.getMetricsTimeUnit());
686+
long processingTime =
687+
details.get(Timing.PROCESSING, rpcMetrics.getMetricsTimeUnit());
687688
rpcMetrics.addRpcLockWaitTime(waitTime);
688689
rpcMetrics.addRpcProcessingTime(processingTime);
689690
rpcMetrics.addRpcResponseTime(responseTime);

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java

+74-13
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
import java.io.IOException;
2222
import java.io.InputStream;
2323
import java.io.OutputStream;
24-
import java.lang.reflect.Constructor;
2524
import java.net.BindException;
2625
import java.net.InetAddress;
2726
import java.net.InetSocketAddress;
@@ -46,10 +45,6 @@
4645

4746
import javax.net.SocketFactory;
4847

49-
import org.apache.hadoop.security.AccessControlException;
50-
import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
51-
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
52-
5348
import org.apache.commons.net.util.SubnetUtils;
5449
import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
5550
import org.apache.hadoop.classification.InterfaceAudience;
@@ -58,9 +53,13 @@
5853
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
5954
import org.apache.hadoop.ipc.Server;
6055
import org.apache.hadoop.ipc.VersionedProtocol;
56+
import org.apache.hadoop.security.AccessControlException;
6157
import org.apache.hadoop.security.SecurityUtil;
58+
import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
59+
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
6260
import org.apache.hadoop.util.ReflectionUtils;
6361
import org.apache.hadoop.util.Preconditions;
62+
import org.apache.hadoop.util.dynamic.DynConstructors;
6463

6564
import org.slf4j.Logger;
6665
import org.slf4j.LoggerFactory;
@@ -941,10 +940,59 @@ public static IOException wrapException(final String destHost,
941940

942941
}
943942
} catch (IOException ex) {
944-
return (IOException) new IOException("Failed on local exception: "
945-
+ exception + "; Host Details : "
946-
+ getHostDetailsAsString(destHost, destPort, localHost))
947-
.initCause(exception);
943+
try {
944+
return new IOException("Failed on local exception: "
945+
+ exception + "; Host Details : "
946+
+ getHostDetailsAsString(destHost, destPort, localHost), exception);
947+
} catch (Exception ignore) {
948+
// in worst case, return the original exception
949+
return exception;
950+
}
951+
}
952+
}
953+
954+
/**
955+
* Return an {@link IOException} of the same type as the input exception but with
956+
* a modified exception message that includes the node name.
957+
*
958+
* @param ioe existing exception.
959+
* @param nodeName name of the node.
960+
* @return IOException
961+
*/
962+
public static IOException addNodeNameToIOException(final IOException ioe, final String nodeName) {
963+
try {
964+
final Throwable cause = ioe.getCause();
965+
IOException newIoe = null;
966+
if (cause != null) {
967+
try {
968+
DynConstructors.Ctor<? extends IOException> ctor =
969+
new DynConstructors.Builder()
970+
.impl(ioe.getClass(), String.class, Throwable.class)
971+
.buildChecked();
972+
newIoe = ctor.newInstance(nodeName + ": " + ioe.getMessage(), cause);
973+
} catch (NoSuchMethodException e) {
974+
// no matching constructor - try next approach below
975+
}
976+
}
977+
if (newIoe == null) {
978+
DynConstructors.Ctor<? extends IOException> ctor =
979+
new DynConstructors.Builder()
980+
.impl(ioe.getClass(), String.class)
981+
.buildChecked();
982+
newIoe = ctor.newInstance(nodeName + ": " + ioe.getMessage());
983+
if (cause != null) {
984+
try {
985+
newIoe.initCause(cause);
986+
} catch (Exception e) {
987+
// Unable to initCause. Ignore the exception.
988+
}
989+
}
990+
}
991+
newIoe.setStackTrace(ioe.getStackTrace());
992+
return newIoe;
993+
} catch (Exception e) {
994+
// Unable to create new exception. Return the original exception.
995+
return ioe;
948996
}
949997
}
950998

@@ -957,9 +1005,22 @@ private static <T extends IOException> T wrapWithMessage(
9571005
T exception, String msg) throws T {
9581006
Class<? extends Throwable> clazz = exception.getClass();
9591007
try {
960-
Constructor<? extends Throwable> ctor = clazz.getConstructor(String.class);
961-
Throwable t = ctor.newInstance(msg);
962-
return (T)(t.initCause(exception));
1008+
try {
1009+
DynConstructors.Ctor<T> ctor =
1010+
new DynConstructors.Builder()
1011+
.impl(clazz, String.class, Throwable.class)
1012+
.buildChecked();
1013+
return ctor.newInstance(msg, exception);
1014+
} catch (NoSuchMethodException e) {
1015+
// no matching constructor - try next approach below
1016+
}
1017+
DynConstructors.Ctor<T> ctor =
1018+
new DynConstructors.Builder()
1019+
.impl(clazz, String.class)
1020+
.buildChecked();
1021+
T newException = ctor.newInstance(msg);
1022+
newException.initCause(exception);
1023+
return newException;
9631024
} catch (NoSuchMethodException e) {
9641025
return exception;
9651026
} catch (Throwable e) {
@@ -1114,7 +1175,7 @@ public static Set<Integer> getFreeSocketPorts(int numOfPorts) {
11141175

11151176
/**
11161177
* Return an @{@link InetAddress} to bind to. If bindWildCardAddress is true
1117-
* than returns null.
1178+
* then returns null.
11181179
*
11191180
* @param localAddr local addr.
11201181
* @param bindWildCardAddress bind wildcard address.

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java

+15-16
Original file line numberDiff line numberDiff line change
@@ -24,21 +24,20 @@
2424
import java.util.StringTokenizer;
2525

2626
import org.apache.hadoop.test.GenericTestUtils;
27-
import org.junit.Assert;
28-
27+
import org.junit.jupiter.api.AfterEach;
28+
import org.junit.jupiter.api.BeforeEach;
29+
import org.junit.jupiter.api.Test;
2930
import org.apache.hadoop.fs.permission.FsPermission;
3031
import org.apache.hadoop.security.UserGroupInformation;
3132
import org.apache.hadoop.util.Shell;
3233
import org.apache.hadoop.util.StringUtils;
33-
import org.junit.After;
34-
import org.junit.Before;
35-
import org.junit.Test;
3634
import org.slf4j.event.Level;
3735

3836
import static org.apache.hadoop.fs.FileContextTestHelper.*;
3937
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
40-
import static org.junit.Assert.assertEquals;
41-
import static org.junit.Assert.fail;
38+
import static org.junit.jupiter.api.Assertions.assertEquals;
39+
import static org.junit.jupiter.api.Assertions.assertTrue;
40+
import static org.junit.jupiter.api.Assertions.fail;
4241

4342
/**
4443
* <p>
@@ -52,7 +51,7 @@
5251
* test and override {@link #setUp()} to initialize the <code>fc</code>
5352
* {@link FileContext} instance variable.
5453
*
55-
* Since this a junit 4 you can also do a single setup before
54+
* Since this is a JUnit 4+ test you can also do a single setup before
5655
* the start of any tests.
5756
* E.g.
5857
* @BeforeClass public static void clusterSetupAtBegining()
@@ -80,22 +79,22 @@ protected FileContextTestHelper getFileContextHelper() {
8079

8180
protected abstract FileContext getFileContext() throws Exception;
8281

83-
@Before
82+
@BeforeEach
8483
public void setUp() throws Exception {
8584
fileContextTestHelper = getFileContextHelper();
8685
fc = getFileContext();
8786
fc.mkdir(fileContextTestHelper.getTestRootPath(fc), FileContext.DEFAULT_PERM, true);
8887
}
8988

90-
@After
89+
@AfterEach
9190
public void tearDown() throws Exception {
9291
fc.delete(fileContextTestHelper.getTestRootPath(fc), true);
9392
}
9493

9594
private void cleanupFile(FileContext fc, Path name) throws IOException {
96-
Assert.assertTrue(exists(fc, name));
95+
assertTrue(exists(fc, name));
9796
fc.delete(name, true);
98-
Assert.assertTrue(!exists(fc, name));
97+
assertTrue(!exists(fc, name));
9998
}
10099

101100
@Test
@@ -158,12 +157,12 @@ public void testSetOwner() throws IOException {
158157
try {
159158
String g0 = groups.get(0);
160159
fc.setOwner(f, null, g0);
161-
Assert.assertEquals(g0, fc.getFileStatus(f).getGroup());
160+
assertEquals(fc.getFileStatus(f).getGroup(), g0);
162161

163162
if (groups.size() > 1) {
164163
String g1 = groups.get(1);
165164
fc.setOwner(f, null, g1);
166-
Assert.assertEquals(g1, fc.getFileStatus(f).getGroup());
165+
assertEquals(fc.getFileStatus(f).getGroup(), g1);
167166
} else {
168167
System.out.println("Not testing changing the group since user " +
169168
"belongs to only one group.");
@@ -193,7 +192,7 @@ public FileContext run() throws Exception {
193192
}
194193

195194
});
196-
assertEquals("otherUser",newFc.getUgi().getUserName());
195+
assertEquals(newFc.getUgi().getUserName(), "otherUser");
197196
}
198197

199198
static List<String> getGroups() throws IOException {
@@ -207,7 +206,7 @@ static List<String> getGroups() throws IOException {
207206

208207

209208
void doFilePermissionCheck(FsPermission expectedPerm, FsPermission actualPerm) {
210-
Assert.assertEquals(expectedPerm.applyUMask(getFileMask()), actualPerm);
209+
assertEquals(expectedPerm.applyUMask(getFileMask()), actualPerm);
211210
}
212211

213212

0 commit comments

Comments
 (0)