diff --git a/CHANGELOG.md b/CHANGELOG.md
index 11c438aac..463d03c5c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -66,6 +66,7 @@ All notable changes to this project will be documented in this file.
 
 - spark: Fix CVE-2024-36114 in Spark 3.5.1 by upgrading a dependency. Spark 3.5.2 is not affected. ([#921])
 - trino: Correctly report Trino version ([#881]).
+- hbase: Fix CVE-2024-36114 in HBase `2.6.0` by upgrading a dependency ([#925]).
 - druid: Fix CVE-2024-36114 in Druid `26.0.0` and `30.0.0` by upgrading a dependency ([#926]).
 
 [#783]: https://github.com/stackabletech/docker-images/pull/783
@@ -112,6 +113,7 @@ All notable changes to this project will be documented in this file.
 [#919]: https://github.com/stackabletech/docker-images/pull/919
 [#920]: https://github.com/stackabletech/docker-images/pull/920
 [#921]: https://github.com/stackabletech/docker-images/pull/921
+[#925]: https://github.com/stackabletech/docker-images/pull/925
 [#926]: https://github.com/stackabletech/docker-images/pull/926
 
 ## [24.7.0] - 2024-07-24
diff --git a/hbase/stackable/patches/2.4.12/001-HBASE-27292-2.4.12.patch b/hbase/stackable/patches/2.4.12/001-HBASE-27292-2.4.12.patch
deleted file mode 100644
index 55be86270..000000000
--- a/hbase/stackable/patches/2.4.12/001-HBASE-27292-2.4.12.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 0b2d405b81020a7ca7d42b4d1b6318a8e1341b33 Mon Sep 17 00:00:00 2001
-From: Lars Francke
-Date: Thu, 19 Oct 2023 23:51:11 +0200
-Subject: [PATCH] HBASE-27292. Fix build failure against Hadoop 3.3.4 due to
- added dependency on okhttp.
-
----
- hbase-shaded/pom.xml | 14 +++++++++++++-
- 1 file changed, 13 insertions(+), 1 deletion(-)
-
-diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
-index 5d3b66a9fe5..e990b6810b0 100644
---- a/hbase-shaded/pom.xml
-+++ b/hbase-shaded/pom.xml
-@@ -448,7 +448,19 @@
-                     <relocation>
-                       <pattern>net/</pattern>
-                       <shadedPattern>${shaded.prefix}.net.</shadedPattern>
-                     </relocation>
--
-+                    <relocation>
-+                      <pattern>org.agrona</pattern>
-+                      <shadedPattern>${shaded.prefix}.org.agrona</shadedPattern>
-+                    </relocation>
-+
-+                    <relocation>
-+                      <pattern>okhttp3.</pattern>
-+                      <shadedPattern>${shaded.prefix}.okhttp3.</shadedPattern>
-+                    </relocation>
-+                    <relocation>
-+                      <pattern>kotlin.</pattern>
-+                      <shadedPattern>${shaded.prefix}.kotlin.</shadedPattern>
-+                    </relocation>
-
-
-+                    <relocation>
-+                      <pattern>com.sun.istack</pattern>
-+                      <shadedPattern>${shaded.prefix}.com.sun.istack</shadedPattern>
-+                    </relocation>
-+                    <relocation>
-+                      <pattern>com.sun.jersey</pattern>
-+                      <shadedPattern>${shaded.prefix}.com.sunjersey</shadedPattern>
-+                    </relocation>
-+                    <relocation>
-+                      <pattern>com.sun.xml</pattern>
-+                      <shadedPattern>${shaded.prefix}.com.sun.xml</shadedPattern>
-+                    </relocation>
-                     <relocation>
-                       <pattern>com.cedarsoftware</pattern>
-                       <shadedPattern>${shaded.prefix}.com.cedarsoftware</shadedPattern>
---
-2.42.0
-
diff --git a/hbase/stackable/patches/2.4.12/003-HBASE-25292-2.4.12.patch b/hbase/stackable/patches/2.4.12/003-HBASE-25292-2.4.12.patch
deleted file mode 100644
index c483b6911..000000000
--- a/hbase/stackable/patches/2.4.12/003-HBASE-25292-2.4.12.patch
+++ /dev/null
@@ -1,1525 +0,0 @@
-From ee6a1a328b4fdc72efe864ce542cbf65b5f52a34 Mon Sep 17 00:00:00 2001
-From: Lars Francke
-Date: Fri, 10 Nov 2023 13:18:57 +0100
-Subject: [PATCH] HBASE-25292 Improve InetSocketAddress usage discipline
- (#2669)
-
-Network identities should be bound late. Remote addresses should be
-resolved at the last possible moment, just before connect(). Network
-identity mappings can change, so our code should not inappropriately
-cache them. Otherwise we might miss a change and fail to operate normally.
-
-Revert "HBASE-14544 Allow HConnectionImpl to not refresh the dns on errors"
-Removes hbase.resolve.hostnames.on.failure and related code. We always
-resolve hostnames, as late as possible.
-
-Preserve InetSocketAddress caching per RPC connection. Avoids potential
-lookups per Call.
-
-Replace InetSocketAddress with Address where used as a map key. If we want
-to key by hostname and/or resolved address we should be explicit about it.
-Using Address chooses mapping by hostname and port only.
-
-Add metrics for potential nameservice resolution attempts, whenever an
-InetSocketAddress is instantiated for connect; and metrics for failed
-resolution, whenever InetSocketAddress#isUnresolved on the new instance
-is true.
-
-* Use ServerName directly to build a stub key
-
-* Resolve and cache ISA on a RpcChannel as late as possible, at first call
-
-* Remove now invalid unit test TestCIBadHostname
-
-We resolve DNS at the latest possible time, at first call, and do not
-resolve hostnames for creating stubs at all, so this unit test cannot
-work now.
-
-Reviewed-by: Mingliang Liu
-Signed-off-by: Duo Zhang
----
- .../hbase/client/AsyncConnectionImpl.java     |  9 +-
- .../client/ConnectionImplementation.java      | 18 ++--
- .../hadoop/hbase/client/ConnectionUtils.java  | 28 ++----
- .../hbase/client/MetricsConnection.java       | 15 +++
- .../hadoop/hbase/ipc/AbstractRpcClient.java   | 95 ++++++++++++------
- .../hbase/ipc/BlockingRpcConnection.java      | 47 ++++++---
- .../apache/hadoop/hbase/ipc/ConnectionId.java | 10 +-
- .../hadoop/hbase/ipc/FailedServers.java       | 17 ++--
- .../org/apache/hadoop/hbase/ipc/IPCUtil.java  | 11 +--
- .../hadoop/hbase/ipc/NettyRpcConnection.java  | 37 +++++--
- .../apache/hadoop/hbase/ipc/RpcClient.java    |  8 +-
- .../hadoop/hbase/ipc/RpcConnection.java       | 17 ++--
- .../hbase/ipc/ServerTooBusyException.java     |  8 ++
- .../client/TestMasterRegistryHedgedReads.java |  6 +-
- .../hadoop/hbase/ipc/TestConnectionId.java    |  6 +-
- .../hbase/ipc/TestFailedServersLog.java       |  6 +-
- .../apache/hadoop/hbase/ipc/TestIPCUtil.java  |  4 +-
- .../hbase/ipc/TestNettyRpcConnection.java     |  4 +-
- .../org/apache/hadoop/hbase/net/Address.java  | 39 +++++++-
- .../hbase/io/hfile/MemcachedBlockCache.java   |  3 +
- .../hbase/regionserver/HRegionServer.java     | 22 +++--
- .../hbase/client/TestCIBadHostname.java       | 99 -------------------
- .../hbase/client/TestClientTimeouts.java      | 15 ++-
- .../hadoop/hbase/ipc/TestHBaseClient.java     | 10 +-
- .../apache/hadoop/hbase/zookeeper/ZKUtil.java |  7 +-
- 25 files changed, 270 insertions(+), 271 deletions(-)
- delete mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java
-
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
-index 72885ad38e8..26533618652 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java
-@@ -76,8 +76,6 @@ class AsyncConnectionImpl implements AsyncConnection {
-       .setUncaughtExceptionHandler(Threads.LOGGING_EXCEPTION_HANDLER).build(),
-     10, TimeUnit.MILLISECONDS);
-
--  private static final String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure";
--
-   private final Configuration conf;
-
-   final AsyncConnectionConfiguration connConf;
-@@ -92,8 +90,6 @@ class AsyncConnectionImpl implements AsyncConnection {
-
-   final RpcControllerFactory rpcControllerFactory;
-
--  private final boolean hostnameCanChange;
--
-   private final AsyncRegionLocator locator;
-
-   final AsyncRpcRetryingCallerFactory callerFactory;
-@@ -136,7 +132,6 @@ class AsyncConnectionImpl implements AsyncConnection {
-     }
-     this.rpcClient = RpcClientFactory.createClient(conf, clusterId, metrics.orElse(null));
-     this.rpcControllerFactory = RpcControllerFactory.instantiate(conf);
--    this.hostnameCanChange = conf.getBoolean(RESOLVE_HOSTNAME_ON_FAIL_KEY, true);
-     this.rpcTimeout =
-       (int) Math.min(Integer.MAX_VALUE, TimeUnit.NANOSECONDS.toMillis(connConf.getRpcTimeoutNs()));
-     this.locator = new AsyncRegionLocator(this, RETRY_TIMER);
-@@ -259,7 +254,7 @@ class AsyncConnectionImpl implements AsyncConnection {
-
-   ClientService.Interface getRegionServerStub(ServerName serverName) throws IOException {
-     return ConcurrentMapUtils.computeIfAbsentEx(rsStubs,
--      getStubKey(ClientService.getDescriptor().getName(), serverName, hostnameCanChange),
-+      getStubKey(ClientService.getDescriptor().getName(), serverName),
-       () -> createRegionServerStub(serverName));
-   }
-
-@@ -273,7 +268,7 @@ class AsyncConnectionImpl implements AsyncConnection {
-
-   AdminService.Interface getAdminStub(ServerName serverName) throws IOException {
-     return ConcurrentMapUtils.computeIfAbsentEx(adminSubs,
--      getStubKey(AdminService.getDescriptor().getName(), serverName, hostnameCanChange),
-+      getStubKey(AdminService.getDescriptor().getName(), serverName),
-       () -> createAdminServerStub(serverName));
-   }
-
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
-index 145dc913396..78baaad00eb 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
-@@ -158,9 +158,6 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-   public static final String RETRIES_BY_SERVER_KEY = "hbase.client.retries.by.server";
-   private static final Logger LOG = LoggerFactory.getLogger(ConnectionImplementation.class);
-
--  private static final String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure";
--
--  private final boolean hostnamesCanChange;
-   private final long pause;
-   private final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified
-   // The mode tells if HedgedRead, LoadBalance mode is supported.
-@@ -296,9 +293,8 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-     }
-     this.metaCache = new MetaCache(this.metrics);
-
--    boolean shouldListen = conf.getBoolean(HConstants.STATUS_PUBLISHED,
--      HConstants.STATUS_PUBLISHED_DEFAULT);
--    this.hostnamesCanChange = conf.getBoolean(RESOLVE_HOSTNAME_ON_FAIL_KEY, true);
-+    boolean shouldListen =
-+      conf.getBoolean(HConstants.STATUS_PUBLISHED, HConstants.STATUS_PUBLISHED_DEFAULT);
-     Class<? extends ClusterStatusListener.Listener> listenerClass =
-       conf.getClass(ClusterStatusListener.STATUS_LISTENER_CLASS,
-         ClusterStatusListener.DEFAULT_STATUS_LISTENER_CLASS,
-@@ -474,7 +470,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-       throw new RegionServerStoppedException(masterServer + " is dead.");
-     }
-     String key = getStubKey(MasterProtos.HbckService.BlockingInterface.class.getName(),
--      masterServer, this.hostnamesCanChange);
-+      masterServer);
-
-     return new HBaseHbck(
-       (MasterProtos.HbckService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> {
-@@ -1254,8 +1250,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-       throw new MasterNotRunningException(sn + " is dead.");
-     }
-     // Use the security info interface name as our stub key
--    String key =
--      getStubKey(MasterProtos.MasterService.getDescriptor().getName(), sn, hostnamesCanChange);
-+    String key = getStubKey(MasterProtos.MasterService.getDescriptor().getName(), sn);
-     MasterProtos.MasterService.BlockingInterface stub =
-       (MasterProtos.MasterService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> {
-         BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(sn, user, rpcTimeout);
-@@ -1303,8 +1298,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-     if (isDeadServer(serverName)) {
-       throw new RegionServerStoppedException(serverName + " is dead.");
-     }
--    String key = getStubKey(AdminProtos.AdminService.BlockingInterface.class.getName(), serverName,
--      this.hostnamesCanChange);
-+    String key = getStubKey(AdminProtos.AdminService.BlockingInterface.class.getName(), serverName);
-     return (AdminProtos.AdminService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> {
-       BlockingRpcChannel channel =
-         this.rpcClient.createBlockingRpcChannel(serverName, user, rpcTimeout);
-@@ -1319,7 +1313,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
-       throw new RegionServerStoppedException(serverName + " is dead.");
-     }
-     String key = getStubKey(ClientProtos.ClientService.BlockingInterface.class.getName(),
--      serverName, this.hostnamesCanChange);
-+      serverName);
-     return (ClientProtos.ClientService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> {
-       BlockingRpcChannel channel =
-         this.rpcClient.createBlockingRpcChannel(serverName, user, rpcTimeout);
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
-index 6f3fa23f67d..132a915a78a 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
-@@ -25,6 +25,7 @@ import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
- import java.io.IOException;
- import java.lang.reflect.UndeclaredThrowableException;
- import java.net.InetAddress;
-+import java.net.InetSocketAddress;
- import java.net.UnknownHostException;
- import java.util.Arrays;
- import java.util.List;
-@@ -150,32 +151,17 @@ public final class ConnectionUtils {
-   }
-
-   /**
--   * Return retires + 1. The returned value will be in range [1, Integer.MAX_VALUE].
-+   * Get a unique key for the rpc stub to the given server.
-    */
--  static int retries2Attempts(int retries) {
--    return Math.max(1, retries == Integer.MAX_VALUE ? Integer.MAX_VALUE : retries + 1);
-+  static String getStubKey(String serviceName, ServerName serverName) {
-+    return String.format("%s@%s", serviceName, serverName);
-   }
-
-   /**
--   * Get a unique key for the rpc stub to the given server.
-+   * Return retires + 1. The returned value will be in range [1, Integer.MAX_VALUE].
-    */
--  static String getStubKey(String serviceName, ServerName serverName, boolean hostnameCanChange) {
--    // Sometimes, servers go down and they come back up with the same hostname but a different
--    // IP address. Force a resolution of the rsHostname by trying to instantiate an
--    // InetSocketAddress, and this way we will rightfully get a new stubKey.
--    // Also, include the hostname in the key so as to take care of those cases where the
--    // DNS name is different but IP address remains the same.
--    String hostname = serverName.getHostname();
--    int port = serverName.getPort();
--    if (hostnameCanChange) {
--      try {
--        InetAddress ip = InetAddress.getByName(hostname);
--        return serviceName + "@" + hostname + "-" + ip.getHostAddress() + ":" + port;
--      } catch (UnknownHostException e) {
--        LOG.warn("Can not resolve " + hostname + ", please check your network", e);
--      }
--    }
--    return serviceName + "@" + hostname + ":" + port;
-+  static int retries2Attempts(int retries) {
-+    return Math.max(1, retries == Integer.MAX_VALUE ? Integer.MAX_VALUE : retries + 1);
-   }
-
-   static void checkHasFamilies(Mutation mutation) {
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
-index 993c6caae1a..1f03a386346 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
-@@ -66,6 +66,8 @@ public class MetricsConnection implements StatisticTrackable {
-   private static final String HEAP_BASE = "heapOccupancy_";
-   private static final String CACHE_BASE = "cacheDroppingExceptions_";
-   private static final String UNKNOWN_EXCEPTION = "UnknownException";
-+  private static final String NS_LOOKUPS = "nsLookups";
-+  private static final String NS_LOOKUPS_FAILED = "nsLookupsFailed";
-   private static final String CLIENT_SVC = ClientService.getDescriptor().getName();
-
-   /** A container class for collecting details about the RPC call as it percolates. */
-@@ -288,6 +290,9 @@ public class MetricsConnection implements StatisticTrackable {
-   protected final Counter hedgedReadWin;
-   protected final Histogram concurrentCallsPerServerHist;
-   protected final Histogram numActionsPerServerHist;
-+  protected final Counter nsLookups;
-+  protected final Counter nsLookupsFailed;
-+
-
-   // dynamic metrics
-
-@@ -350,6 +355,8 @@ public class MetricsConnection implements StatisticTrackable {
-       "concurrentCallsPerServer", scope));
-     this.numActionsPerServerHist = registry.histogram(name(MetricsConnection.class,
-       "numActionsPerServer", scope));
-+    this.nsLookups = registry.counter(name(this.getClass(), NS_LOOKUPS, scope));
-+    this.nsLookupsFailed = registry.counter(name(this.getClass(), NS_LOOKUPS_FAILED, scope));
-
-     this.reporter = JmxReporter.forRegistry(this.registry).build();
-     this.reporter.start();
-@@ -518,4 +525,12 @@ public class MetricsConnection implements StatisticTrackable {
-       (exception == null? UNKNOWN_EXCEPTION : exception.getClass().getSimpleName()),
-       cacheDroppingExceptions, counterFactory).inc();
-   }
-+
-+  public void incrNsLookups() {
-+    this.nsLookups.inc();
-+  }
-+
-+  public void incrNsLookupsFailed() {
-+    this.nsLookupsFailed.inc();
-+  }
- }
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-index a57672f02ed..e4f0a7a36f4 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.ServerName;
- import org.apache.hadoop.hbase.client.MetricsConnection;
- import org.apache.hadoop.hbase.codec.Codec;
- import org.apache.hadoop.hbase.codec.KeyValueCodec;
-+import org.apache.hadoop.hbase.net.Address;
- import org.apache.hadoop.hbase.security.User;
- import org.apache.hadoop.hbase.security.UserProvider;
- import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-@@ -134,10 +135,10 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-
-   private int maxConcurrentCallsPerServer;
-
--  private static final LoadingCache<InetSocketAddress, AtomicInteger> concurrentCounterCache =
-+  private static final LoadingCache<Address, AtomicInteger> concurrentCounterCache =
-       CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.HOURS).
--          build(new CacheLoader<InetSocketAddress, AtomicInteger>() {
--            @Override public AtomicInteger load(InetSocketAddress key) throws Exception {
-+          build(new CacheLoader<Address, AtomicInteger>() {
-+            @Override public AtomicInteger load(Address key) throws Exception {
-               return new AtomicInteger(0);
-             }
-           });
-@@ -206,7 +207,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-           // The connection itself will disconnect if there is no pending call for maxIdleTime.
-           if (conn.getLastTouched() < closeBeforeTime && !conn.isActive()) {
-             if (LOG.isTraceEnabled()) {
--              LOG.trace("Cleanup idle connection to {}", conn.remoteId().address);
-+              LOG.trace("Cleanup idle connection to {}", conn.remoteId().getAddress());
-             }
-             connections.remove(conn.remoteId(), conn);
-             conn.cleanupConnection();
-@@ -343,11 +344,11 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-   private T getConnection(ConnectionId remoteId) throws IOException {
-     if (failedServers.isFailedServer(remoteId.getAddress())) {
-       if (LOG.isDebugEnabled()) {
--        LOG.debug("Not trying to connect to " + remoteId.address
-+        LOG.debug("Not trying to connect to " + remoteId.getAddress()
-           + " this server is in the failed servers list");
-       }
-       throw new FailedServerException(
--        "This server is in the failed servers list: " + remoteId.address);
-+        "This server is in the failed servers list: " + remoteId.getAddress());
-     }
-     T conn;
-     synchronized (connections) {
-@@ -365,7 +366,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-    */
-   protected abstract T createConnection(ConnectionId remoteId) throws IOException;
-
--  private void onCallFinished(Call call, HBaseRpcController hrc, InetSocketAddress addr,
-+  private void onCallFinished(Call call, HBaseRpcController hrc, Address addr,
-       RpcCallback<Message> callback) {
-     call.callStats.setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.getStartTime());
-     if (metrics != null) {
-@@ -390,8 +391,8 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-   }
-
-   Call callMethod(final Descriptors.MethodDescriptor md, final HBaseRpcController hrc,
--      final Message param, Message returnType, final User ticket, final InetSocketAddress addr,
--      final RpcCallback<Message> callback) {
-+      final Message param, Message returnType, final User ticket,
-+      final InetSocketAddress inetAddr, final RpcCallback<Message> callback) {
-     final MetricsConnection.CallStats cs = MetricsConnection.newCallStats();
-     cs.setStartTime(EnvironmentEdgeManager.currentTime());
-
-@@ -405,6 +406,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-       cs.setNumActionsPerServer(numActions);
-     }
-
-+    final Address addr = Address.fromSocketAddress(inetAddr);
-     final AtomicInteger counter = concurrentCounterCache.getUnchecked(addr);
-     Call call = new Call(nextCallId(), md, param, hrc.cellScanner(), returnType,
-       hrc.getCallTimeout(), hrc.getPriority(), new RpcCallback<Call>() {
-@@ -429,12 +431,8 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-     return call;
-   }
-
--  InetSocketAddress createAddr(ServerName sn) throws UnknownHostException {
--    InetSocketAddress addr = new InetSocketAddress(sn.getHostname(), sn.getPort());
--    if (addr.isUnresolved()) {
--      throw new UnknownHostException("can not resolve " + sn.getServerName());
--    }
--    return addr;
-+  private static Address createAddr(ServerName sn) {
-+    return Address.fromParts(sn.getHostname(), sn.getPort());
-   }
-
-   /**
-@@ -449,8 +447,10 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-     synchronized (connections) {
-       for (T connection : connections.values()) {
-         ConnectionId remoteId = connection.remoteId();
--        if (remoteId.address.getPort() == sn.getPort()
--          && remoteId.address.getHostName().equals(sn.getHostname())) {
-+        if (
-+          remoteId.getAddress().getPort() == sn.getPort()
-+            && remoteId.getAddress().getHostname().equals(sn.getHostname())
-+        ) {
-           LOG.info("The server on " + sn.toString() + " is dead - stopping the connection "
-             + connection.remoteId);
-           connections.remove(remoteId, connection);
-@@ -509,19 +509,25 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-
-   @Override
-   public BlockingRpcChannel createBlockingRpcChannel(final ServerName sn, final User ticket,
--      int rpcTimeout) throws UnknownHostException {
-+      int rpcTimeout) {
-     return new BlockingRpcChannelImplementation(this, createAddr(sn), ticket, rpcTimeout);
-   }
-
-   @Override
--  public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout)
--      throws UnknownHostException {
-+  public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) {
-     return new RpcChannelImplementation(this, createAddr(sn), user, rpcTimeout);
-   }
-
-   private static class AbstractRpcChannel {
-
--    protected final InetSocketAddress addr;
-+    protected final Address addr;
-+
-+    // We cache the resolved InetSocketAddress for the channel so we do not do a DNS lookup
-+    // per method call on the channel. If the remote target is removed or reprovisioned and
-+    // its identity changes a new channel with a newly resolved InetSocketAddress will be
-+    // created as part of retry, so caching here is fine.
-+    // Normally, caching an InetSocketAddress is an anti-pattern.
-+    protected InetSocketAddress isa;
-
-     protected final AbstractRpcClient<?> rpcClient;
-
-@@ -529,7 +535,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-
-     protected final int rpcTimeout;
-
--    protected AbstractRpcChannel(AbstractRpcClient<?> rpcClient, InetSocketAddress addr,
-+    protected AbstractRpcChannel(AbstractRpcClient<?> rpcClient, Address addr,
-         User ticket, int rpcTimeout) {
-       this.addr = addr;
-       this.rpcClient = rpcClient;
-@@ -566,15 +572,29 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-       implements BlockingRpcChannel {
-
-     protected BlockingRpcChannelImplementation(AbstractRpcClient<?> rpcClient,
--        InetSocketAddress addr, User ticket, int rpcTimeout) {
-+        Address addr, User ticket, int rpcTimeout) {
-       super(rpcClient, addr, ticket, rpcTimeout);
-     }
-
-     @Override
-     public Message callBlockingMethod(Descriptors.MethodDescriptor md, RpcController controller,
-         Message param, Message returnType) throws ServiceException {
-+      // Look up remote address upon first call
-+      if (isa == null) {
-+        if (this.rpcClient.metrics != null) {
-+          this.rpcClient.metrics.incrNsLookups();
-+        }
-+        isa = Address.toSocketAddress(addr);
-+        if (isa.isUnresolved()) {
-+          if (this.rpcClient.metrics != null) {
-+            this.rpcClient.metrics.incrNsLookupsFailed();
-+          }
-+          isa = null;
-+          throw new ServiceException(new UnknownHostException(addr + " could not be resolved"));
-+        }
-+      }
-       return rpcClient.callBlockingMethod(md, configureRpcController(controller),
--        param, returnType, ticket, addr);
-+        param, returnType, ticket, isa);
-     }
-   }
-
-@@ -584,20 +604,35 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-   public static class RpcChannelImplementation extends AbstractRpcChannel implements
-       RpcChannel {
-
--    protected RpcChannelImplementation(AbstractRpcClient<?> rpcClient, InetSocketAddress addr,
--        User ticket, int rpcTimeout) throws UnknownHostException {
-+    protected RpcChannelImplementation(AbstractRpcClient<?> rpcClient, Address addr,
-+        User ticket, int rpcTimeout) {
-       super(rpcClient, addr, ticket, rpcTimeout);
-     }
-
-     @Override
-     public void callMethod(Descriptors.MethodDescriptor md, RpcController controller,
-         Message param, Message returnType, RpcCallback<Message> done) {
-+      HBaseRpcController configuredController =
-+        configureRpcController(Preconditions.checkNotNull(controller,
-+          "RpcController can not be null for async rpc call"));
-+      // Look up remote address upon first call
-+      if (isa == null || isa.isUnresolved()) {
-+        if (this.rpcClient.metrics != null) {
-+          this.rpcClient.metrics.incrNsLookups();
-+        }
-+        isa = Address.toSocketAddress(addr);
-+        if (isa.isUnresolved()) {
-+          if (this.rpcClient.metrics != null) {
-+            this.rpcClient.metrics.incrNsLookupsFailed();
-+          }
-+          isa = null;
-+          controller.setFailed(addr + " could not be resolved");
-+          return;
-+        }
-+      }
-       // This method does not throw any exceptions, so the caller must provide a
-       // HBaseRpcController which is used to pass the exceptions.
--      this.rpcClient.callMethod(md,
--        configureRpcController(Preconditions.checkNotNull(controller,
--          "RpcController can not be null for async rpc call")),
--        param, returnType, ticket, addr, done);
-+      this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, isa, done);
-     }
-   }
- }
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
-index 6d4babee745..ce2bd11f960 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
-@@ -32,6 +32,7 @@ import java.io.IOException;
- import java.io.InputStream;
- import java.io.InterruptedIOException;
- import java.io.OutputStream;
-+import java.net.InetSocketAddress;
- import java.net.Socket;
- import java.net.SocketTimeoutException;
- import java.net.UnknownHostException;
-@@ -51,6 +52,7 @@ import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
- import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
- import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
- import org.apache.hadoop.hbase.log.HBaseMarkers;
-+import org.apache.hadoop.hbase.net.Address;
- import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
- import org.apache.hadoop.hbase.security.SaslUtil;
- import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-@@ -207,7 +209,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
-    */
-   public void cleanup(IOException e) {
-     IOException ie = new ConnectionClosingException(
--      "Connection to " + remoteId.address + " is closing.");
-+      "Connection to " + remoteId.getAddress() + " is closing.");
-     for (Call call : callsToWrite) {
-       call.setException(ie);
-     }
-@@ -217,12 +219,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
-
-   BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId) throws IOException {
-     super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId,
--      rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor);
-+      rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor,
-+      rpcClient.metrics);
-     this.rpcClient = rpcClient;
--    if (remoteId.getAddress().isUnresolved()) {
--      throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName());
--    }
--
-     this.connectionHeaderPreamble = getConnectionHeaderPreamble();
-     ConnectionHeader header = getConnectionHeader();
-     ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize());
-@@ -257,7 +256,17 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
-         if (this.rpcClient.localAddr != null) {
-           this.socket.bind(this.rpcClient.localAddr);
-         }
--        NetUtils.connect(this.socket, remoteId.getAddress(), this.rpcClient.connectTO);
-+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } -+ NetUtils.connect(this.socket, remoteAddr, this.rpcClient.connectTO); - this.socket.setSoTimeout(this.rpcClient.readTO); - return; - } catch (SocketTimeoutException toe) { -@@ -362,8 +371,18 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - - private boolean setupSaslConnection(final InputStream in2, final OutputStream out2) - throws IOException { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookups(); -+ } -+ InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (serverAddr.isUnresolved()) { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } - saslRpcClient = new HBaseSaslRpcClient(this.rpcClient.conf, provider, token, -- serverAddress, securityInfo, this.rpcClient.fallbackAllowed, -+ serverAddr.getAddress(), securityInfo, this.rpcClient.fallbackAllowed, - this.rpcClient.conf.get("hbase.rpc.protection", - QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), - this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT)); -@@ -440,16 +459,16 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - - if (this.rpcClient.failedServers.isFailedServer(remoteId.getAddress())) { - if (LOG.isDebugEnabled()) { -- LOG.debug("Not trying to connect to " + remoteId.address -+ LOG.debug("Not trying to connect to " + remoteId.getAddress() - + " this server is in the failed servers list"); - } - throw new FailedServerException( -- "This server is in the failed servers list: " + remoteId.address); -+ "This server is in the failed servers list: " + remoteId.getAddress()); - } - - try { - if (LOG.isDebugEnabled()) { -- LOG.debug("Connecting to " + remoteId.address); -+ LOG.debug("Connecting to " + remoteId.getAddress()); - } - - short numRetries = 0; -@@ -504,14 +523,14 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - closeSocket(); - IOException e = ExceptionUtil.asInterrupt(t); - if (e == null) { -- this.rpcClient.failedServers.addToFailedServers(remoteId.address, t); -+ this.rpcClient.failedServers.addToFailedServers(remoteId.getAddress(), t); - if (t instanceof LinkageError) { - // probably the hbase hadoop version does not match the running hadoop version - e = new DoNotRetryIOException(t); - } else if (t instanceof IOException) { - e = (IOException) t; - } else { -- e = new IOException("Could not set up IO Streams to " + remoteId.address, t); -+ e = new IOException("Could not set up IO Streams to " + remoteId.getAddress(), t); - } - } - throw e; -@@ -768,7 +787,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (callSender != null) { - callSender.interrupt(); - } -- closeConn(new IOException("connection to " + remoteId.address + " closed")); -+ closeConn(new IOException("connection to " + remoteId.getAddress() + " closed")); - } - - @Override -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -index 
1396f1e7abc..cac9ff27382 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -@@ -17,9 +17,9 @@ - */ - package org.apache.hadoop.hbase.ipc; - --import java.net.InetSocketAddress; - import java.util.Objects; - -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.yetus.audience.InterfaceAudience; - -@@ -32,9 +32,9 @@ class ConnectionId { - private static final int PRIME = 16777619; - final User ticket; - final String serviceName; -- final InetSocketAddress address; -+ final Address address; - -- public ConnectionId(User ticket, String serviceName, InetSocketAddress address) { -+ public ConnectionId(User ticket, String serviceName, Address address) { - this.address = address; - this.ticket = ticket; - this.serviceName = serviceName; -@@ -44,7 +44,7 @@ class ConnectionId { - return this.serviceName; - } - -- public InetSocketAddress getAddress() { -+ public Address getAddress() { - return address; - } - -@@ -73,7 +73,7 @@ class ConnectionId { - return hashCode(ticket,serviceName,address); - } - -- public static int hashCode(User ticket, String serviceName, InetSocketAddress address) { -+ public static int hashCode(User ticket, String serviceName, Address address) { - return (address.hashCode() + - PRIME * (PRIME * serviceName.hashCode() ^ - (ticket == null ? 0 : ticket.hashCode()))); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java -index 86b763b91b0..1a8bc0129ea 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java -@@ -17,7 +17,6 @@ - */ - package org.apache.hadoop.hbase.ipc; - --import java.net.InetSocketAddress; - import java.util.HashMap; - import java.util.Map; - -@@ -25,6 +24,7 @@ import org.apache.hadoop.conf.Configuration; - import org.apache.yetus.audience.InterfaceAudience; - import org.slf4j.Logger; - import org.slf4j.LoggerFactory; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; - - /** -@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; - */ - @InterfaceAudience.Private - public class FailedServers { -- private final Map failedServers = new HashMap(); -+ private final Map failedServers = new HashMap(); - private long latestExpiry = 0; - private final int recheckServersTimeout; - private static final Logger LOG = LoggerFactory.getLogger(FailedServers.class); -@@ -45,13 +45,13 @@ public class FailedServers { - /** - * Add an address to the list of the failed servers list. 
- */ -- public synchronized void addToFailedServers(InetSocketAddress address, Throwable throwable) { -+ public synchronized void addToFailedServers(Address address, Throwable throwable) { - final long expiry = EnvironmentEdgeManager.currentTime() + recheckServersTimeout; -- this.failedServers.put(address.toString(), expiry); -+ this.failedServers.put(address, expiry); - this.latestExpiry = expiry; - if (LOG.isDebugEnabled()) { - LOG.debug( -- "Added failed server with address " + address.toString() + " to list caused by " -+ "Added failed server with address " + address + " to list caused by " - + throwable.toString()); - } - } -@@ -61,7 +61,7 @@ public class FailedServers { - * - * @return true if the server is in the failed servers list - */ -- public synchronized boolean isFailedServer(final InetSocketAddress address) { -+ public synchronized boolean isFailedServer(final Address address) { - if (failedServers.isEmpty()) { - return false; - } -@@ -70,15 +70,14 @@ public class FailedServers { - failedServers.clear(); - return false; - } -- String key = address.toString(); -- Long expiry = this.failedServers.get(key); -+ Long expiry = this.failedServers.get(address); - if (expiry == null) { - return false; - } - if (expiry >= now) { - return true; - } else { -- this.failedServers.remove(key); -+ this.failedServers.remove(address); - } - return false; - } -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -index 11d150e0878..6abf4bb1f3e 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -@@ -21,7 +21,6 @@ import java.io.IOException; - import java.io.OutputStream; - import java.lang.reflect.InvocationTargetException; - import java.net.ConnectException; --import java.net.InetSocketAddress; - import java.net.SocketTimeoutException; - import java.nio.channels.ClosedChannelException; - import java.util.concurrent.TimeoutException; -@@ -34,6 +33,7 @@ import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; - import org.apache.hadoop.hbase.exceptions.ConnectionClosedException; - import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; - import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.util.Bytes; - import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; - import org.apache.hadoop.ipc.RemoteException; -@@ -160,9 +160,9 @@ class IPCUtil { - } - } - -- private static String getCallTarget(InetSocketAddress addr, RegionInfo regionInfo) { -- return "address=" + addr + -- (regionInfo != null? ", region=" + regionInfo.getRegionNameAsString(): ""); -+ private static String getCallTarget(Address addr, RegionInfo regionInfo) { -+ return "address=" + addr -+ + (regionInfo != null ? 
", region=" + regionInfo.getRegionNameAsString() : ""); - } - - /** -@@ -182,8 +182,7 @@ class IPCUtil { - * @return an exception to throw - * @see ClientExceptionsUtil#isConnectionException(Throwable) - */ -- static IOException wrapException(InetSocketAddress addr, RegionInfo regionInfo, -- Throwable error) { -+ static IOException wrapException(Address addr, RegionInfo regionInfo, Throwable error) { - if (error instanceof ConnectException) { - // connection refused; include the host:port in the error - return (IOException) new ConnectException("Call to " + getCallTarget(addr, regionInfo) + -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -index fc9f9793021..609d2c12cea 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -@@ -24,12 +24,15 @@ import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled; - import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; - - import java.io.IOException; -+import java.net.InetSocketAddress; -+import java.net.UnknownHostException; - import java.util.concurrent.Executors; - import java.util.concurrent.ScheduledExecutorService; - import java.util.concurrent.ThreadLocalRandom; - import java.util.concurrent.TimeUnit; - import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.NettyHBaseRpcConnectionHeaderHandler; - import org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler; - import org.apache.hadoop.hbase.security.SaslChallengeDecoder; -@@ -97,7 +100,8 @@ class NettyRpcConnection extends RpcConnection { - - NettyRpcConnection(NettyRpcClient rpcClient, ConnectionId remoteId) throws IOException { - super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId, -- rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor); -+ rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor, -+ rpcClient.metrics); - this.rpcClient = rpcClient; - this.eventLoop = rpcClient.group.next(); - byte[] connectionHeaderPreamble = getConnectionHeaderPreamble(); -@@ -206,8 +210,18 @@ class NettyRpcConnection extends RpcConnection { - Promise saslPromise = ch.eventLoop().newPromise(); - final NettyHBaseSaslRpcClientHandler saslHandler; - try { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookups(); -+ } -+ InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (serverAddr.isUnresolved()) { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } - saslHandler = new NettyHBaseSaslRpcClientHandler(saslPromise, ticket, provider, token, -- serverAddress, securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); -+ serverAddr.getAddress(), securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); - } catch (IOException e) { - failInit(ch, e); - return; -@@ -265,23 +279,32 @@ class NettyRpcConnection extends RpcConnection { - }); - } - -- private void connect() { -+ private void connect() throws UnknownHostException { - assert eventLoop.inEventLoop(); -- LOG.trace("Connecting to {}", remoteId.address); -- -+ 
LOG.trace("Connecting to {}", remoteId.getAddress()); -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } - this.channel = new Bootstrap().group(eventLoop).channel(rpcClient.channelClass) - .option(ChannelOption.TCP_NODELAY, rpcClient.isTcpNoDelay()) - .option(ChannelOption.SO_KEEPALIVE, rpcClient.tcpKeepAlive) - .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, rpcClient.connectTO) - .handler(new BufferCallBeforeInitHandler()).localAddress(rpcClient.localAddr) -- .remoteAddress(remoteId.address).connect().addListener(new ChannelFutureListener() { -+ .remoteAddress(remoteAddr).connect().addListener(new ChannelFutureListener() { - - @Override - public void operationComplete(ChannelFuture future) throws Exception { - Channel ch = future.channel(); - if (!future.isSuccess()) { - failInit(ch, toIOE(future.cause())); -- rpcClient.failedServers.addToFailedServers(remoteId.address, future.cause()); -+ rpcClient.failedServers.addToFailedServers(remoteId.getAddress(), future.cause()); - return; - } - ch.writeAndFlush(connectionHeaderPreamble.retainedDuplicate()); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -index 877d9b0d5b9..5bb08152d30 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -@@ -18,7 +18,6 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.Closeable; --import java.io.IOException; - import org.apache.hadoop.hbase.ServerName; - import org.apache.hadoop.hbase.security.User; - import org.apache.yetus.audience.InterfaceAudience; -@@ -64,10 +63,8 @@ public interface RpcClient extends Closeable { - * @param rpcTimeout default rpc operation timeout - * - * @return A blocking rpc channel that goes via this rpc client instance. -- * @throws IOException when channel could not be created - */ -- BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException; -+ BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout); - - /** - * Creates a "channel" that can be used by a protobuf service. Useful setting up -@@ -79,8 +76,7 @@ public interface RpcClient extends Closeable { - * - * @return A rpc channel that goes via this rpc client instance. - */ -- RpcChannel createRpcChannel(final ServerName sn, final User user, int rpcTimeout) -- throws IOException; -+ RpcChannel createRpcChannel(final ServerName sn, final User user, int rpcTimeout); - - /** - * Interrupt the connections to the given server. 
This should be called if the server -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -index e3004e639f8..a87f0e4dbb8 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -@@ -18,12 +18,11 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.IOException; --import java.net.InetAddress; --import java.net.UnknownHostException; - import java.util.concurrent.TimeUnit; - - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HConstants; -+import org.apache.hadoop.hbase.client.MetricsConnection; - import org.apache.hadoop.hbase.codec.Codec; - import org.apache.hadoop.hbase.security.SecurityInfo; - import org.apache.hadoop.hbase.security.User; -@@ -59,8 +58,6 @@ abstract class RpcConnection { - - protected final Token token; - -- protected final InetAddress serverAddress; -- - protected final SecurityInfo securityInfo; - - protected final int reloginMaxBackoff; // max pause before relogin on sasl failure -@@ -69,6 +66,8 @@ abstract class RpcConnection { - - protected final CompressionCodec compressor; - -+ protected final MetricsConnection metrics; -+ - protected final HashedWheelTimer timeoutTimer; - - protected final Configuration conf; -@@ -83,17 +82,13 @@ abstract class RpcConnection { - protected SaslClientAuthenticationProvider provider; - - protected RpcConnection(Configuration conf, HashedWheelTimer timeoutTimer, ConnectionId remoteId, -- String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec compressor) -- throws IOException { -- if (remoteId.getAddress().isUnresolved()) { -- throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName()); -- } -- this.serverAddress = remoteId.getAddress().getAddress(); -+ String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec compressor, -+ MetricsConnection metrics) throws IOException { - this.timeoutTimer = timeoutTimer; - this.codec = codec; - this.compressor = compressor; - this.conf = conf; -- -+ this.metrics = metrics; - User ticket = remoteId.getTicket(); - this.securityInfo = SecurityInfo.getInfo(remoteId.getServiceName()); - this.useSasl = isSecurityEnabled; -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -index 135c78d6674..eae9886ca55 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc; - import java.net.InetSocketAddress; - - import org.apache.hadoop.hbase.DoNotRetryIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.yetus.audience.InterfaceAudience; - - /** -@@ -29,7 +30,14 @@ import org.apache.yetus.audience.InterfaceAudience; - @SuppressWarnings("serial") - @InterfaceAudience.Public - public class ServerTooBusyException extends DoNotRetryIOException { -+ -+ public ServerTooBusyException(Address address, long count) { -+ super("Busy Server! " + count + " concurrent RPCs against " + address); -+ } -+ -+ @Deprecated - public ServerTooBusyException(InetSocketAddress address, long count) { - super("Busy Server! 
" + count + " concurrent RPCs against " + address); - } -+ - } -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -index 0af01984218..40a38c706a1 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -@@ -90,14 +90,12 @@ public class TestMasterRegistryHedgedReads { - } - - @Override -- public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException { -+ public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) { - throw new UnsupportedOperationException(); - } - - @Override -- public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException { -+ public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) { - return new RpcChannelImpl(); - } - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -index 3e10f7409c5..48a079d3e75 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -@@ -22,10 +22,10 @@ import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertNotEquals; - import static org.junit.Assert.assertTrue; - --import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.HBaseConfiguration; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; -@@ -43,7 +43,7 @@ public class TestConnectionId { - private User testUser1 = User.createUserForTesting(testConfig, "test", new String[]{"testgroup"}); - private User testUser2 = User.createUserForTesting(testConfig, "test", new String[]{"testgroup"}); - private String serviceName = "test"; -- private InetSocketAddress address = new InetSocketAddress(999); -+ private Address address = Address.fromParts("localhost", 999); - private ConnectionId connectionId1 = new ConnectionId(testUser1, serviceName, address); - private ConnectionId connectionId2 = new ConnectionId(testUser2, serviceName, address); - -@@ -66,7 +66,7 @@ public class TestConnectionId { - - @Test - public void testToString() { -- String expectedString = "0.0.0.0/0.0.0.0:999/test/test (auth:SIMPLE)"; -+ String expectedString = "localhost:999/test/test (auth:SIMPLE)"; - assertEquals(expectedString, connectionId1.toString()); - } - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -index 11e02d6a638..fa44022f8d0 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -@@ -21,9 +21,9 @@ import static org.hamcrest.CoreMatchers.is; - import static org.hamcrest.MatcherAssert.assertThat; - import static org.junit.Assert.assertEquals; - --import java.net.InetSocketAddress; - import 
org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.log4j.Appender; -@@ -51,7 +51,7 @@ public class TestFailedServersLog { - HBaseClassTestRule.forClass(TestFailedServersLog.class); - - static final int TEST_PORT = 9999; -- private InetSocketAddress addr; -+ private Address addr; - - @Mock - private Appender mockAppender; -@@ -74,7 +74,7 @@ public class TestFailedServersLog { - Throwable nullException = new NullPointerException(); - - FailedServers fs = new FailedServers(new Configuration()); -- addr = new InetSocketAddress(TEST_PORT); -+ addr = Address.fromParts("localhost", TEST_PORT); - - fs.addToFailedServers(addr, nullException); - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -index 7e2c59fcb85..45da1e8560d 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue; - import java.io.IOException; - import java.lang.reflect.Constructor; - import java.lang.reflect.InvocationTargetException; --import java.net.InetSocketAddress; - import java.util.ArrayList; - import java.util.List; - import java.util.concurrent.CompletableFuture; -@@ -34,6 +33,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.client.RegionInfoBuilder; - import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; - import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.hadoop.hbase.util.FutureUtils; -@@ -101,7 +101,7 @@ public class TestIPCUtil { - for (Class clazz : ClientExceptionsUtil.getConnectionExceptionTypes()) { - exceptions.add(create(clazz)); - } -- InetSocketAddress addr = InetSocketAddress.createUnresolved("127.0.0.1", 12345); -+ Address addr = Address.fromParts("127.0.0.1", 12345); - for (Throwable exception : exceptions) { - if (exception instanceof TimeoutException) { - assertThat(IPCUtil.wrapException(addr, null, exception), instanceOf(TimeoutIOException.class)); -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -index ab75d6011ab..8782fe116b0 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -@@ -26,9 +26,9 @@ import java.io.IOException; - import java.lang.reflect.InvocationTargetException; - import java.lang.reflect.Method; - import java.lang.reflect.Modifier; --import java.net.InetSocketAddress; - import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.HBaseConfiguration; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; -@@ -59,7 +59,7 @@ public class TestNettyRpcConnection { - public static void 
setUp() throws IOException { - CLIENT = new NettyRpcClient(HBaseConfiguration.create()); - CONN = new NettyRpcConnection(CLIENT, -- new ConnectionId(User.getCurrent(), "test", new InetSocketAddress("localhost", 1234))); -+ new ConnectionId(User.getCurrent(), "test", Address.fromParts("localhost", 1234))); - } - - @AfterClass -diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -index 48fa522397c..725a3764a36 100644 ---- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -@@ -17,6 +17,8 @@ - */ - package org.apache.hadoop.hbase.net; - -+import java.net.InetSocketAddress; -+ - import org.apache.commons.lang3.StringUtils; - import org.apache.yetus.audience.InterfaceAudience; - -@@ -32,7 +34,7 @@ import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort; - */ - @InterfaceAudience.Public - public class Address implements Comparable
{ -- private HostAndPort hostAndPort; -+ private final HostAndPort hostAndPort; - - private Address(HostAndPort hostAndPort) { - this.hostAndPort = hostAndPort; -@@ -46,6 +48,33 @@ public class Address implements Comparable
{ - return new Address(HostAndPort.fromString(hostnameAndPort)); - } - -+ public static Address fromSocketAddress(InetSocketAddress addr) { -+ return Address.fromParts(addr.getHostString(), addr.getPort()); -+ } -+ -+ public static InetSocketAddress toSocketAddress(Address addr) { -+ return new InetSocketAddress(addr.getHostName(), addr.getPort()); -+ } -+ -+ public static InetSocketAddress[] toSocketAddress(Address[] addrs) { -+ if (addrs == null) { -+ return null; -+ } -+ InetSocketAddress[] result = new InetSocketAddress[addrs.length]; -+ for (int i = 0; i < addrs.length; i++) { -+ result[i] = toSocketAddress(addrs[i]); -+ } -+ return result; -+ } -+ -+ public String getHostName() { -+ return this.hostAndPort.getHost(); -+ } -+ -+ /** -+ * @deprecated Use {@link #getHostName()} instead -+ */ -+ @Deprecated - public String getHostname() { - return this.hostAndPort.getHost(); - } -@@ -65,7 +94,7 @@ public class Address implements Comparable
<Address> { - * otherwise returns same as {@link #toString()}} - */ - public String toStringWithoutDomain() { -- String hostname = getHostname(); -+ String hostname = getHostName(); - String [] parts = hostname.split("\\."); - if (parts.length > 1) { - for (String part: parts) { -@@ -86,7 +115,7 @@ public class Address implements Comparable<Address>
{ - } - if (other instanceof Address) { - Address that = (Address)other; -- return this.getHostname().equals(that.getHostname()) && -+ return this.getHostName().equals(that.getHostName()) && - this.getPort() == that.getPort(); - } - return false; -@@ -94,12 +123,12 @@ public class Address implements Comparable<Address>
{ - - @Override - public int hashCode() { -- return this.getHostname().hashCode() ^ getPort(); -+ return this.getHostName().hashCode() ^ getPort(); - } - - @Override - public int compareTo(Address that) { -- int compare = this.getHostname().compareTo(that.getHostname()); -+ int compare = this.getHostName().compareTo(that.getHostName()); - if (compare != 0) { - return compare; - } -diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -index 6ccd138f70d..246d7e0a138 100644 ---- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -@@ -100,6 +100,9 @@ public class MemcachedBlockCache implements BlockCache { - // case. - String serverListString = c.get(MEMCACHED_CONFIG_KEY,"localhost:11211"); - String[] servers = serverListString.split(","); -+ // MemcachedClient requires InetSocketAddresses, we have to create them now. Implies any -+ // resolved identities cannot have their address mappings changed while the MemcachedClient -+ // instance is alive. We won't get a chance to trigger re-resolution. - List serverAddresses = new ArrayList<>(servers.length); - for (String s:servers) { - serverAddresses.add(Addressing.createInetSocketAddressFromHostAndPortStr(s)); -diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -index fe80e7806eb..199fce11bbb 100644 ---- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -@@ -127,6 +127,7 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; - import org.apache.hadoop.hbase.mob.MobFileCache; - import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; - import org.apache.hadoop.hbase.namequeues.SlowLogTableOpsChore; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost; - import org.apache.hadoop.hbase.procedure2.RSProcedureCallable; - import org.apache.hadoop.hbase.quotas.FileSystemUtilizationChore; -@@ -320,14 +321,16 @@ public class HRegionServer extends Thread implements - - /** - * Map of encoded region names to the DataNode locations they should be hosted on -- * We store the value as InetSocketAddress since this is used only in HDFS -+ * We store the value as Address since InetSocketAddress is required by the HDFS - * API (create() that takes favored nodes as hints for placing file blocks). - * We could have used ServerName here as the value class, but we'd need to - * convert it to InetSocketAddress at some point before the HDFS API call, and - * it seems a bit weird to store ServerName since ServerName refers to RegionServers -- * and here we really mean DataNode locations. -+ * and here we really mean DataNode locations. We don't store it as InetSocketAddress -+ * here because the conversion on demand from Address to InetSocketAddress will -+ * guarantee the resolution results will be fresh when we need it. 
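The comment above sums up the pattern this change applies throughout: persist only hostname and port, and construct the InetSocketAddress at the moment it is needed. A minimal sketch of that idea in plain Java; the class and member names (FavoredNodesSketch, favoredNodes) are illustrative, not the actual HBase fields:

import java.net.InetSocketAddress;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class FavoredNodesSketch {
  // Only hostname:port strings are stored; no DNS work happens on write.
  private final Map<String, String[]> favoredNodes = new ConcurrentHashMap<>();

  void update(String encodedRegionName, String[] hostPortPairs) {
    favoredNodes.put(encodedRegionName, hostPortPairs);
  }

  // Resolution happens on every read, so a remapped DataNode hostname is
  // picked up the next time the favored nodes are requested.
  InetSocketAddress[] resolve(String encodedRegionName) {
    String[] pairs = favoredNodes.getOrDefault(encodedRegionName, new String[0]);
    InetSocketAddress[] out = new InetSocketAddress[pairs.length];
    for (int i = 0; i < pairs.length; i++) {
      String[] hp = pairs[i].split(":", 2);
      // The InetSocketAddress constructor performs the lookup at this moment.
      out[i] = new InetSocketAddress(hp[0], Integer.parseInt(hp[1]));
    }
    return out;
  }
}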
- */ -- private final Map regionFavoredNodesMap = new ConcurrentHashMap<>(); -+ private final Map regionFavoredNodesMap = new ConcurrentHashMap<>(); - - private LeaseManager leaseManager; - -@@ -3521,11 +3524,11 @@ public class HRegionServer extends Thread implements - @Override - public void updateRegionFavoredNodesMapping(String encodedRegionName, - List favoredNodes) { -- InetSocketAddress[] addr = new InetSocketAddress[favoredNodes.size()]; -+ Address[] addr = new Address[favoredNodes.size()]; - // Refer to the comment on the declaration of regionFavoredNodesMap on why -- // it is a map of region name to InetSocketAddress[] -+ // it is a map of region name to Address[] - for (int i = 0; i < favoredNodes.size(); i++) { -- addr[i] = InetSocketAddress.createUnresolved(favoredNodes.get(i).getHostName(), -+ addr[i] = Address.fromParts(favoredNodes.get(i).getHostName(), - favoredNodes.get(i).getPort()); - } - regionFavoredNodesMap.put(encodedRegionName, addr); -@@ -3533,13 +3536,14 @@ public class HRegionServer extends Thread implements - - /** - * Return the favored nodes for a region given its encoded name. Look at the -- * comment around {@link #regionFavoredNodesMap} on why it is InetSocketAddress[] -- * -+ * comment around {@link #regionFavoredNodesMap} on why we convert to InetSocketAddress[] -+ * here. -+ * @param encodedRegionName - * @return array of favored locations - */ - @Override - public InetSocketAddress[] getFavoredNodesForRegion(String encodedRegionName) { -- return regionFavoredNodesMap.get(encodedRegionName); -+ return Address.toSocketAddress(regionFavoredNodesMap.get(encodedRegionName)); - } - - @Override -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java -deleted file mode 100644 -index 83d4bfaf4bb..00000000000 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java -+++ /dev/null -@@ -1,99 +0,0 @@ --/** -- * Licensed to the Apache Software Foundation (ASF) under one -- * or more contributor license agreements. See the NOTICE file -- * distributed with this work for additional information -- * regarding copyright ownership. The ASF licenses this file -- * to you under the Apache License, Version 2.0 (the -- * "License"); you may not use this file except in compliance -- * with the License. You may obtain a copy of the License at -- * -- * http://www.apache.org/licenses/LICENSE-2.0 -- * -- * Unless required by applicable law or agreed to in writing, software -- * distributed under the License is distributed on an "AS IS" BASIS, -- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- * See the License for the specific language governing permissions and -- * limitations under the License. 
-- */ --package org.apache.hadoop.hbase.client; -- --import static org.junit.Assert.fail; -- --import java.net.UnknownHostException; --import org.apache.hadoop.hbase.HBaseClassTestRule; --import org.apache.hadoop.hbase.HBaseTestingUtility; --import org.apache.hadoop.hbase.HConstants; --import org.apache.hadoop.hbase.ServerName; --import org.apache.hadoop.hbase.testclassification.ClientTests; --import org.apache.hadoop.hbase.testclassification.MediumTests; --import org.junit.AfterClass; --import org.junit.BeforeClass; --import org.junit.ClassRule; --import org.junit.Test; --import org.junit.experimental.categories.Category; -- --/** -- * Tests that we fail fast when hostname resolution is not working and do not cache -- * unresolved InetSocketAddresses. -- */ --@Category({MediumTests.class, ClientTests.class}) --public class TestCIBadHostname { -- -- @ClassRule -- public static final HBaseClassTestRule CLASS_RULE = -- HBaseClassTestRule.forClass(TestCIBadHostname.class); -- -- private static HBaseTestingUtility testUtil; -- private static ConnectionImplementation conn; -- -- @BeforeClass -- public static void setupBeforeClass() throws Exception { -- testUtil = HBaseTestingUtility.createLocalHTU(); -- testUtil.startMiniCluster(); -- conn = (ConnectionImplementation) testUtil.getConnection(); -- } -- -- @AfterClass -- public static void teardownAfterClass() throws Exception { -- conn.close(); -- testUtil.shutdownMiniCluster(); -- } -- -- @Test(expected = UnknownHostException.class) -- public void testGetAdminBadHostname() throws Exception { -- // verify that we can get an instance with the cluster hostname -- ServerName master = testUtil.getHBaseCluster().getMaster().getServerName(); -- try { -- conn.getAdmin(master); -- } catch (UnknownHostException uhe) { -- fail("Obtaining admin to the cluster master should have succeeded"); -- } -- -- // test that we fail to get a client to an unresolvable hostname, which -- // means it won't be cached -- ServerName badHost = -- ServerName.valueOf("unknownhost.invalid:" + HConstants.DEFAULT_MASTER_PORT, -- System.currentTimeMillis()); -- conn.getAdmin(badHost); -- fail("Obtaining admin to unresolvable hostname should have failed"); -- } -- -- @Test(expected = UnknownHostException.class) -- public void testGetClientBadHostname() throws Exception { -- // verify that we can get an instance with the cluster hostname -- ServerName rs = testUtil.getHBaseCluster().getRegionServer(0).getServerName(); -- try { -- conn.getClient(rs); -- } catch (UnknownHostException uhe) { -- fail("Obtaining client to the cluster regionserver should have succeeded"); -- } -- -- // test that we fail to get a client to an unresolvable hostname, which -- // means it won't be cached -- ServerName badHost = -- ServerName.valueOf("unknownhost.invalid:" + HConstants.DEFAULT_REGIONSERVER_PORT, -- System.currentTimeMillis()); -- conn.getAdmin(badHost); -- fail("Obtaining client to unresolvable hostname should have failed"); -- } --} -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -index aa8e10d5111..dbc0da981be 100644 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -@@ -20,10 +20,8 @@ package org.apache.hadoop.hbase.client; - import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertTrue; - --import 
java.net.InetSocketAddress; - import java.net.SocketAddress; - import java.net.SocketTimeoutException; --import java.net.UnknownHostException; - import java.util.Random; - import java.util.concurrent.ThreadLocalRandom; - import java.util.concurrent.atomic.AtomicInteger; -@@ -38,6 +36,7 @@ import org.apache.hadoop.hbase.ipc.AbstractRpcClient; - import org.apache.hadoop.hbase.ipc.BlockingRpcClient; - import org.apache.hadoop.hbase.ipc.HBaseRpcController; - import org.apache.hadoop.hbase.ipc.RpcClientFactory; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.MediumTests; -@@ -147,14 +146,12 @@ public class TestClientTimeouts { - - // Return my own instance, one that does random timeouts - @Override -- public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, -- User ticket, int rpcTimeout) throws UnknownHostException { -+ public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User ticket, int rpcTimeout) { - return new RandomTimeoutBlockingRpcChannel(this, sn, ticket, rpcTimeout); - } - - @Override -- public RpcChannel createRpcChannel(ServerName sn, User ticket, int rpcTimeout) -- throws UnknownHostException { -+ public RpcChannel createRpcChannel(ServerName sn, User ticket, int rpcTimeout) { - return new RandomTimeoutRpcChannel(this, sn, ticket, rpcTimeout); - } - } -@@ -170,7 +167,7 @@ public class TestClientTimeouts { - - RandomTimeoutBlockingRpcChannel(final BlockingRpcClient rpcClient, final ServerName sn, - final User ticket, final int rpcTimeout) { -- super(rpcClient, new InetSocketAddress(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); -+ super(rpcClient, Address.fromParts(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); - } - - @Override -@@ -191,8 +188,8 @@ public class TestClientTimeouts { - private static class RandomTimeoutRpcChannel extends AbstractRpcClient.RpcChannelImplementation { - - RandomTimeoutRpcChannel(AbstractRpcClient rpcClient, ServerName sn, User ticket, -- int rpcTimeout) throws UnknownHostException { -- super(rpcClient, new InetSocketAddress(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); -+ int rpcTimeout) { -+ super(rpcClient, Address.fromParts(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); - } - - @Override -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -index ad4741ccf8a..680836b982f 100644 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -@@ -17,9 +17,9 @@ - */ - package org.apache.hadoop.hbase.ipc; - --import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.RPCTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -@@ -42,11 +42,11 @@ public class TestHBaseClient { - FailedServers fs = new FailedServers(new Configuration()); - Throwable testThrowable = new Throwable();//throwable already tested in TestFailedServers.java - -- InetSocketAddress ia = InetSocketAddress.createUnresolved("bad", 12); -+ Address ia = Address.fromParts("bad", 12); - // same server as ia -- 
InetSocketAddress ia2 = InetSocketAddress.createUnresolved("bad", 12); -- InetSocketAddress ia3 = InetSocketAddress.createUnresolved("badtoo", 12); -- InetSocketAddress ia4 = InetSocketAddress.createUnresolved("badtoo", 13); -+ Address ia2 = Address.fromParts("bad", 12); -+ Address ia3 = Address.fromParts("badtoo", 12); -+ Address ia4 = Address.fromParts("badtoo", 13); - - - Assert.assertFalse(fs.isFailedServer(ia)); -diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -index 910d3097f69..d0569dd9f1c 100644 ---- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -@@ -26,6 +26,7 @@ import java.io.OutputStreamWriter; - import java.io.PrintWriter; - import java.net.InetSocketAddress; - import java.net.Socket; -+import java.net.UnknownHostException; - import java.nio.charset.StandardCharsets; - import java.util.ArrayList; - import java.util.Arrays; -@@ -1808,10 +1809,12 @@ public final class ZKUtil { - int port = sp.length > 1 ? Integer.parseInt(sp[1]) - : HConstants.DEFAULT_ZOOKEEPER_CLIENT_PORT; - -- InetSocketAddress sockAddr = new InetSocketAddress(host, port); - try (Socket socket = new Socket()) { -+ InetSocketAddress sockAddr = new InetSocketAddress(host, port); -+ if (sockAddr.isUnresolved()) { -+ throw new UnknownHostException(host + " cannot be resolved"); -+ } - socket.connect(sockAddr, timeout); -- - socket.setSoTimeout(timeout); - try (PrintWriter out = new PrintWriter(new BufferedWriter( - new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)), true); --- -2.42.1 - diff --git a/hbase/stackable/patches/2.4.12/004-HBASE-25336-2.4.12.patch b/hbase/stackable/patches/2.4.12/004-HBASE-25336-2.4.12.patch deleted file mode 100644 index eb4bd2205..000000000 --- a/hbase/stackable/patches/2.4.12/004-HBASE-25336-2.4.12.patch +++ /dev/null @@ -1,342 +0,0 @@ -From e04956f7bb5d95a54612a99905ee2d8e7f0de23a Mon Sep 17 00:00:00 2001 -From: Duo Zhang -Date: Mon, 7 Dec 2020 21:49:04 +0800 -Subject: [PATCH] HBASE-25336 Use Address instead of InetSocketAddress in - RpcClient implementation (#2716) - -Signed-off-by: Guanghao Zhang -(cherry picked from commit f8134795109bc380b53ec814561e1abdb56b2b58) ---- - .../hadoop/hbase/ipc/AbstractRpcClient.java | 60 ++++--------------- - .../hbase/ipc/BlockingRpcConnection.java | 25 ++------ - .../hadoop/hbase/ipc/NettyRpcConnection.java | 27 ++------- - .../hadoop/hbase/ipc/RpcConnection.java | 24 +++++++- - 4 files changed, 39 insertions(+), 97 deletions(-) - -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -index e4f0a7a36f4..3c41aadc852 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -@@ -22,9 +22,7 @@ import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; - import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException; - - import java.io.IOException; --import java.net.InetSocketAddress; - import java.net.SocketAddress; --import java.net.UnknownHostException; - import java.util.Collection; - import java.util.concurrent.Executors; - import java.util.concurrent.ScheduledExecutorService; -@@ -320,7 +318,7 @@ public abstract class AbstractRpcClient implements RpcC - * 
@return A pair with the Message response and the Cell data (if any). - */ - private Message callBlockingMethod(Descriptors.MethodDescriptor md, HBaseRpcController hrc, -- Message param, Message returnType, final User ticket, final InetSocketAddress isa) -+ Message param, Message returnType, final User ticket, final Address isa) - throws ServiceException { - BlockingRpcCallback done = new BlockingRpcCallback<>(); - callMethod(md, hrc, param, returnType, ticket, isa, done); -@@ -392,7 +390,7 @@ public abstract class AbstractRpcClient implements RpcC - - Call callMethod(final Descriptors.MethodDescriptor md, final HBaseRpcController hrc, - final Message param, Message returnType, final User ticket, -- final InetSocketAddress inetAddr, final RpcCallback callback) { -+ final Address addr, final RpcCallback callback) { - final MetricsConnection.CallStats cs = MetricsConnection.newCallStats(); - cs.setStartTime(EnvironmentEdgeManager.currentTime()); - -@@ -406,7 +404,6 @@ public abstract class AbstractRpcClient implements RpcC - cs.setNumActionsPerServer(numActions); - } - -- final Address addr = Address.fromSocketAddress(inetAddr); - final AtomicInteger counter = concurrentCounterCache.getUnchecked(addr); - Call call = new Call(nextCallId(), md, param, hrc.cellScanner(), returnType, - hrc.getCallTimeout(), hrc.getPriority(), new RpcCallback() { -@@ -522,13 +519,6 @@ public abstract class AbstractRpcClient implements RpcC - - protected final Address addr; - -- // We cache the resolved InetSocketAddress for the channel so we do not do a DNS lookup -- // per method call on the channel. If the remote target is removed or reprovisioned and -- // its identity changes a new channel with a newly resolved InetSocketAddress will be -- // created as part of retry, so caching here is fine. -- // Normally, caching an InetSocketAddress is an anti-pattern. 
-- protected InetSocketAddress isa; -- - protected final AbstractRpcClient rpcClient; - - protected final User ticket; -@@ -578,23 +568,9 @@ public abstract class AbstractRpcClient implements RpcC - - @Override - public Message callBlockingMethod(Descriptors.MethodDescriptor md, RpcController controller, -- Message param, Message returnType) throws ServiceException { -- // Look up remote address upon first call -- if (isa == null) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- isa = Address.toSocketAddress(addr); -- if (isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- isa = null; -- throw new ServiceException(new UnknownHostException(addr + " could not be resolved")); -- } -- } -- return rpcClient.callBlockingMethod(md, configureRpcController(controller), -- param, returnType, ticket, isa); -+ Message param, Message returnType) throws ServiceException { -+ return rpcClient.callBlockingMethod(md, configureRpcController(controller), param, returnType, -+ ticket, addr); - } - } - -@@ -610,29 +586,13 @@ public abstract class AbstractRpcClient implements RpcC - } - - @Override -- public void callMethod(Descriptors.MethodDescriptor md, RpcController controller, -- Message param, Message returnType, RpcCallback done) { -- HBaseRpcController configuredController = -- configureRpcController(Preconditions.checkNotNull(controller, -- "RpcController can not be null for async rpc call")); -- // Look up remote address upon first call -- if (isa == null || isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- isa = Address.toSocketAddress(addr); -- if (isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- isa = null; -- controller.setFailed(addr + " could not be resolved"); -- return; -- } -- } -+ public void callMethod(Descriptors.MethodDescriptor md, RpcController controller, Message param, -+ Message returnType, RpcCallback done) { -+ HBaseRpcController configuredController = configureRpcController( -+ Preconditions.checkNotNull(controller, "RpcController can not be null for async rpc call")); - // This method does not throw any exceptions, so the caller must provide a - // HBaseRpcController which is used to pass the exceptions. 
-- this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, isa, done); -+ this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, addr, done); - } - } - } -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -index ce2bd11f960..cd8035fd58e 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -@@ -35,7 +35,6 @@ import java.io.OutputStream; - import java.net.InetSocketAddress; - import java.net.Socket; - import java.net.SocketTimeoutException; --import java.net.UnknownHostException; - import java.security.PrivilegedExceptionAction; - import java.util.ArrayDeque; - import java.util.Locale; -@@ -44,7 +43,6 @@ import java.util.concurrent.ConcurrentHashMap; - import java.util.concurrent.ConcurrentMap; - import java.util.concurrent.ThreadLocalRandom; - import javax.security.sasl.SaslException; -- - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.CellScanner; - import org.apache.hadoop.hbase.DoNotRetryIOException; -@@ -52,7 +50,6 @@ import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; - import org.apache.hadoop.hbase.io.ByteArrayOutputStream; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; - import org.apache.hadoop.hbase.log.HBaseMarkers; --import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; - import org.apache.hadoop.hbase.security.SaslUtil; - import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; -@@ -69,11 +66,13 @@ import org.apache.htrace.core.TraceScope; - import org.apache.yetus.audience.InterfaceAudience; - import org.slf4j.Logger; - import org.slf4j.LoggerFactory; -+ - import org.apache.hbase.thirdparty.com.google.protobuf.Message; - import org.apache.hbase.thirdparty.com.google.protobuf.Message.Builder; - import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; - import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf; - import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator; -+ - import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; - import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; - import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta; -@@ -256,16 +255,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (this.rpcClient.localAddr != null) { - this.socket.bind(this.rpcClient.localAddr); - } -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (remoteAddr.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } -+ InetSocketAddress remoteAddr = getRemoteInetAddress(rpcClient.metrics); - NetUtils.connect(this.socket, remoteAddr, this.rpcClient.connectTO); - this.socket.setSoTimeout(this.rpcClient.readTO); - return; -@@ -374,15 +364,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (this.metrics != null) { - this.metrics.incrNsLookups(); - } -- InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -- if 
(serverAddr.isUnresolved()) { -- if (this.metrics != null) { -- this.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } - saslRpcClient = new HBaseSaslRpcClient(this.rpcClient.conf, provider, token, -- serverAddr.getAddress(), securityInfo, this.rpcClient.fallbackAllowed, -+ socket.getInetAddress(), securityInfo, this.rpcClient.fallbackAllowed, - this.rpcClient.conf.get("hbase.rpc.protection", - QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), - this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT)); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -index 609d2c12cea..d0a13ca33d6 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -@@ -32,17 +32,16 @@ import java.util.concurrent.ThreadLocalRandom; - import java.util.concurrent.TimeUnit; - import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; --import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.NettyHBaseRpcConnectionHeaderHandler; - import org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler; - import org.apache.hadoop.hbase.security.SaslChallengeDecoder; - import org.apache.hadoop.hbase.util.Threads; - import org.apache.hadoop.security.UserGroupInformation; --import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; - import org.apache.yetus.audience.InterfaceAudience; - import org.slf4j.Logger; - import org.slf4j.LoggerFactory; - -+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; - import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback; - import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap; - import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf; -@@ -210,18 +209,9 @@ class NettyRpcConnection extends RpcConnection { - Promise saslPromise = ch.eventLoop().newPromise(); - final NettyHBaseSaslRpcClientHandler saslHandler; - try { -- if (this.metrics != null) { -- this.metrics.incrNsLookups(); -- } -- InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (serverAddr.isUnresolved()) { -- if (this.metrics != null) { -- this.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } - saslHandler = new NettyHBaseSaslRpcClientHandler(saslPromise, ticket, provider, token, -- serverAddr.getAddress(), securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); -+ ((InetSocketAddress) ch.remoteAddress()).getAddress(), securityInfo, -+ rpcClient.fallbackAllowed, this.rpcClient.conf); - } catch (IOException e) { - failInit(ch, e); - return; -@@ -282,16 +272,7 @@ class NettyRpcConnection extends RpcConnection { - private void connect() throws UnknownHostException { - assert eventLoop.inEventLoop(); - LOG.trace("Connecting to {}", remoteId.getAddress()); -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (remoteAddr.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- 
this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } -+ InetSocketAddress remoteAddr = getRemoteInetAddress(rpcClient.metrics); - this.channel = new Bootstrap().group(eventLoop).channel(rpcClient.channelClass) - .option(ChannelOption.TCP_NODELAY, rpcClient.isTcpNoDelay()) - .option(ChannelOption.SO_KEEPALIVE, rpcClient.tcpKeepAlive) -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -index a87f0e4dbb8..c9444668018 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -@@ -18,12 +18,15 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.IOException; -+import java.net.InetSocketAddress; -+import java.net.UnknownHostException; - import java.util.concurrent.TimeUnit; - - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HConstants; - import org.apache.hadoop.hbase.client.MetricsConnection; - import org.apache.hadoop.hbase.codec.Codec; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.SecurityInfo; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider; -@@ -122,7 +125,7 @@ abstract class RpcConnection { - this.remoteId = remoteId; - } - -- protected void scheduleTimeoutTask(final Call call) { -+ protected final void scheduleTimeoutTask(final Call call) { - if (call.timeout > 0) { - call.timeoutTask = timeoutTimer.newTimeout(new TimerTask() { - -@@ -137,7 +140,7 @@ abstract class RpcConnection { - } - } - -- protected byte[] getConnectionHeaderPreamble() { -+ protected final byte[] getConnectionHeaderPreamble() { - // Assemble the preamble up in a buffer first and then send it. Writing individual elements, - // they are getting sent across piecemeal according to wireshark and then server is messing - // up the reading on occasion (the passed in stream is not buffered yet). 
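Both connection implementations above now funnel through a single resolve-and-verify step, added to RpcConnection in the hunk below. A sketch of that shape under stated assumptions: MetricsSketch is a stand-in for HBase's MetricsConnection counters, and the method name mirrors but is not the actual helper.

import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.concurrent.atomic.AtomicLong;

class MetricsSketch {
  final AtomicLong nsLookups = new AtomicLong();
  final AtomicLong nsLookupsFailed = new AtomicLong();
}

final class ResolveSketch {
  private ResolveSketch() {
  }

  static InetSocketAddress resolve(String host, int port, MetricsSketch metrics)
      throws UnknownHostException {
    if (metrics != null) {
      // Count every potential nameservice resolution attempt.
      metrics.nsLookups.incrementAndGet();
    }
    // The constructor resolves immediately; this is the "as late as possible" point.
    InetSocketAddress addr = new InetSocketAddress(host, port);
    if (addr.isUnresolved()) {
      if (metrics != null) {
        // Count failed resolutions separately, as the commit message describes.
        metrics.nsLookupsFailed.incrementAndGet();
      }
      throw new UnknownHostException(host + ":" + port + " could not be resolved");
    }
    return addr;
  }
}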
-@@ -153,7 +156,7 @@ abstract class RpcConnection { - return preamble; - } - -- protected ConnectionHeader getConnectionHeader() { -+ protected final ConnectionHeader getConnectionHeader() { - final ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); - builder.setServiceName(remoteId.getServiceName()); - final UserInformation userInfoPB = provider.getUserInfo(remoteId.ticket); -@@ -176,6 +179,21 @@ abstract class RpcConnection { - return builder.build(); - } - -+ protected final InetSocketAddress getRemoteInetAddress(MetricsConnection metrics) -+ throws UnknownHostException { -+ if (metrics != null) { -+ metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (metrics != null) { -+ metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } -+ return remoteAddr; -+ } -+ - protected abstract void callTimeout(Call call); - - public ConnectionId remoteId() { --- -2.42.1 - diff --git a/hbase/stackable/patches/2.4.12/005-HBASE-27027-2.4.12.patch b/hbase/stackable/patches/2.4.12/005-HBASE-27027-2.4.12.patch deleted file mode 100644 index 78c5117ed..000000000 --- a/hbase/stackable/patches/2.4.12/005-HBASE-27027-2.4.12.patch +++ /dev/null @@ -1,39 +0,0 @@ -diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java -index 884780cecc..72cb7bed07 100644 ---- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java -+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java -@@ -425,7 +425,7 @@ public class HttpServer implements FilterContainer { - } else if ("https".equals(scheme)) { - HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig); - httpsConfig.addCustomizer(new SecureRequestCustomizer()); -- SslContextFactory sslCtxFactory = new SslContextFactory(); -+ SslContextFactory.Server sslCtxFactory = new SslContextFactory.Server(); - sslCtxFactory.setNeedClientAuth(needsClientAuth); - sslCtxFactory.setKeyManagerPassword(keyPassword); - -diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java -index 1ff59c245c..66a0ec999c 100644 ---- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java -+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java -@@ -314,7 +314,7 @@ public class RESTServer implements Constants { - HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig); - httpsConfig.addCustomizer(new SecureRequestCustomizer()); - -- SslContextFactory sslCtxFactory = new SslContextFactory(); -+ SslContextFactory.Server sslCtxFactory = new SslContextFactory.Server(); - String keystore = conf.get(REST_SSL_KEYSTORE_STORE); - String keystoreType = conf.get(REST_SSL_KEYSTORE_TYPE); - String password = HBaseConfiguration.getPassword(conf, -diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java -index 0b86061bd7..cf7e1db86a 100644 ---- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java -+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java -@@ -417,7 +417,7 @@ public class ThriftServer extends Configured implements Tool { - HttpConfiguration httpsConfig = new HttpConfiguration(httpConfig); 
- httpsConfig.addCustomizer(new SecureRequestCustomizer()); - -- SslContextFactory sslCtxFactory = new SslContextFactory(); -+ SslContextFactory.Server sslCtxFactory = new SslContextFactory.Server(); - String keystore = conf.get(THRIFT_SSL_KEYSTORE_STORE_KEY); - String password = HBaseConfiguration.getPassword(conf, - THRIFT_SSL_KEYSTORE_PASSWORD_KEY, null); diff --git a/hbase/stackable/patches/2.4.12/006-HBASE-28242-2.4.12.patch b/hbase/stackable/patches/2.4.12/006-HBASE-28242-2.4.12.patch deleted file mode 100644 index 6d938fa20..000000000 --- a/hbase/stackable/patches/2.4.12/006-HBASE-28242-2.4.12.patch +++ /dev/null @@ -1,462 +0,0 @@ -Subject: [PATCH] HBASE-28242: Adapts ProfileServlet for async-profiler 2.x ---- -Index: hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java -IDEA additional info: -Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP -<+>UTF-8 -=================================================================== -diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java ---- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java (revision e04956f7bb5d95a54612a99905ee2d8e7f0de23a) -+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java (date 1701905406872) -@@ -25,63 +25,72 @@ - import java.util.concurrent.atomic.AtomicInteger; - import java.util.concurrent.locks.Lock; - import java.util.concurrent.locks.ReentrantLock; -- - import javax.servlet.http.HttpServlet; - import javax.servlet.http.HttpServletRequest; - import javax.servlet.http.HttpServletResponse; -- - import org.apache.hadoop.hbase.util.ProcessUtils; - import org.apache.yetus.audience.InterfaceAudience; -- - import org.slf4j.Logger; - import org.slf4j.LoggerFactory; - - import org.apache.hbase.thirdparty.com.google.common.base.Joiner; - - /** -- * Servlet that runs async-profiler as web-endpoint. -- * Following options from async-profiler can be specified as query paramater. -- * // -e event profiling event: cpu|alloc|lock|cache-misses etc. -- * // -d duration run profiling for 'duration' seconds (integer) -- * // -i interval sampling interval in nanoseconds (long) -- * // -j jstackdepth maximum Java stack depth (integer) -- * // -b bufsize frame buffer size (long) -- * // -t profile different threads separately -- * // -s simple class names instead of FQN -- * // -o fmt[,fmt...] output format: summary|traces|flat|collapsed|svg|tree|jfr|html -- * // --width px SVG width pixels (integer) -- * // --height px SVG frame height pixels (integer) -- * // --minwidth px skip frames smaller than px (double) -- * // --reverse generate stack-reversed FlameGraph / Call tree -+ * Servlet that runs async-profiler as web-endpoint. Following options from async-profiler can be -+ * specified as query parameter. -+ *
<ul>
-+ * <li>-e event profiling event: cpu|alloc|lock|cache-misses etc.</li>
-+ * <li>-d duration run profiling for 'duration' seconds (integer), default 10s</li>
-+ * <li>-i interval sampling interval in nanoseconds (long), default 10ms</li>
-+ * <li>-j jstackdepth maximum Java stack depth (integer), default 2048</li>
-+ * <li>-t profile different threads separately</li>
-+ * <li>-s simple class names instead of FQN</li>
-+ * <li>-g print method signatures</li>
-+ * <li>-a annotate Java methods</li>
-+ * <li>-l prepend library names</li>
-+ * <li>-o fmt output format: flat|traces|collapsed|flamegraph|tree|jfr</li>
-+ * <li>--minwidth pct skip frames smaller than pct% (double)</li>
-+ * <li>--reverse generate stack-reversed FlameGraph / Call tree</li>
-+ * </ul>
- * Example: -- * - To collect 30 second CPU profile of current process (returns FlameGraph svg) -- * curl "http://localhost:10002/prof" -- * - To collect 1 minute CPU profile of current process and output in tree format (html) -- * curl "http://localhost:10002/prof?output=tree&duration=60" -- * - To collect 30 second heap allocation profile of current process (returns FlameGraph svg) -- * curl "http://localhost:10002/prof?event=alloc" -- * - To collect lock contention profile of current process (returns FlameGraph svg) -- * curl "http://localhost:10002/prof?event=lock" -- * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) -- * // Perf events: -- * // cpu -- * // page-faults -- * // context-switches -- * // cycles -- * // instructions -- * // cache-references -- * // cache-misses -- * // branches -- * // branch-misses -- * // bus-cycles -- * // L1-dcache-load-misses -- * // LLC-load-misses -- * // dTLB-load-misses -- * // mem:breakpoint -- * // trace:tracepoint -- * // Java events: -- * // alloc -- * // lock -+ * <ul>
-+ * <li>To collect 30 second CPU profile of current process (returns FlameGraph svg): -+ * {@code curl http://localhost:10002/prof"}</li>
-+ * <li>To collect 1 minute CPU profile of current process and output in tree format (html) -+ * {@code curl "http://localhost:10002/prof?output=tree&duration=60"}</li>
-+ * <li>To collect 30 second heap allocation profile of current process (returns FlameGraph): -+ * {@code curl "http://localhost:10002/prof?event=alloc"}</li>
-+ * <li>To collect lock contention profile of current process (returns FlameGraph): -+ * {@code curl "http://localhost:10002/prof?event=lock"}</li>
-+ * </ul>
-+ * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all -+ * events).<br>
-+ * Basic events:
-+ * <ul>
-+ * <li>cpu</li>
-+ * <li>alloc</li>
-+ * <li>lock</li>
-+ * <li>wall</li>
-+ * <li>itimer</li>
-+ * </ul>
-+ * Perf events:
-+ * <ul>
-+ * <li>L1-dcache-load-misses</li>
-+ * <li>LLC-load-misses</li>
-+ * <li>branch-instructions</li>
-+ * <li>branch-misses</li>
-+ * <li>bus-cycles</li>
-+ * <li>cache-misses</li>
-+ * <li>cache-references</li>
-+ * <li>context-switches</li>
-+ * <li>cpu</li>
-+ * <li>cycles</li>
-+ * <li>dTLB-load-misses</li>
-+ * <li>instructions</li>
-+ * <li>mem:breakpoint</li>
-+ * <li>page-faults</li>
-+ * <li>trace:tracepoint</li>
-+ * </ul>
- */ - @InterfaceAudience.Private - public class ProfileServlet extends HttpServlet { -@@ -104,19 +113,21 @@ - CPU("cpu"), - ALLOC("alloc"), - LOCK("lock"), -- PAGE_FAULTS("page-faults"), -+ WALL("wall"), -+ ITIMER("itimer"), -+ BRANCH_INSTRUCTIONS("branch-instructions"), -+ BRANCH_MISSES("branch-misses"), -+ BUS_CYCLES("bus-cycles"), -+ CACHE_MISSES("cache-misses"), -+ CACHE_REFERENCES("cache-references"), - CONTEXT_SWITCHES("context-switches"), - CYCLES("cycles"), -+ DTLB_LOAD_MISSES("dTLB-load-misses"), - INSTRUCTIONS("instructions"), -- CACHE_REFERENCES("cache-references"), -- CACHE_MISSES("cache-misses"), -- BRANCHES("branches"), -- BRANCH_MISSES("branch-misses"), -- BUS_CYCLES("bus-cycles"), - L1_DCACHE_LOAD_MISSES("L1-dcache-load-misses"), - LLC_LOAD_MISSES("LLC-load-misses"), -- DTLB_LOAD_MISSES("dTLB-load-misses"), - MEM_BREAKPOINT("mem:breakpoint"), -+ PAGE_FAULTS("page-faults"), - TRACE_TRACEPOINT("trace:tracepoint"),; - - private final String internalName; -@@ -125,11 +136,11 @@ - this.internalName = internalName; - } - -- public String getInternalName() { -+ String getInternalName() { - return internalName; - } - -- public static Event fromInternalName(final String name) { -+ static Event fromInternalName(final String name) { - for (Event event : values()) { - if (event.getInternalName().equalsIgnoreCase(name)) { - return event; -@@ -140,35 +151,31 @@ - } - } - -- enum Output { -- SUMMARY, -- TRACES, -+ private enum Output { -+ COLLAPSED, -+ FLAMEGRAPH, - FLAT, -- COLLAPSED, -- // No SVG in 2.x asyncprofiler. -- SVG, -- TREE, - JFR, -- // In 2.x asyncprofiler, this is how you get flamegraphs. -- HTML -+ TRACES, -+ TREE - } - - @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SE_TRANSIENT_FIELD_NOT_RESTORED", -- justification = "This class is never serialized nor restored.") -- private transient Lock profilerLock = new ReentrantLock(); -+ justification = "This class is never serialized nor restored.") -+ private final transient Lock profilerLock = new ReentrantLock(); - private transient volatile Process process; -- private String asyncProfilerHome; -+ private final String asyncProfilerHome; - private Integer pid; - - public ProfileServlet() { - this.asyncProfilerHome = getAsyncProfilerHome(); - this.pid = ProcessUtils.getPid(); -- LOG.info("Servlet process PID: " + pid + " asyncProfilerHome: " + asyncProfilerHome); -+ LOG.info("Servlet process PID: {} asyncProfilerHome: {}", pid, asyncProfilerHome); - } - - @Override - protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) -- throws IOException { -+ throws IOException { - if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), req, resp)) { - resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); - setResponseHeader(resp); -@@ -180,10 +187,11 @@ - if (asyncProfilerHome == null || asyncProfilerHome.trim().isEmpty()) { - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - setResponseHeader(resp); -- resp.getWriter().write("ASYNC_PROFILER_HOME env is not set.\n\n" + -- "Please ensure the prerequsites for the Profiler Servlet have been installed and the\n" + -- "environment is properly configured. For more information please see\n" + -- "http://hbase.apache.org/book.html#profiler\n"); -+ resp.getWriter() -+ .write("ASYNC_PROFILER_HOME env is not set.\n\n" -+ + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" -+ + "environment is properly configured. 
For more information please see\n" -+ + "https://hbase.apache.org/book.html#profiler\n"); - return; - } - -@@ -194,42 +202,39 @@ - if (pid == null) { - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - setResponseHeader(resp); -- resp.getWriter().write( -- "'pid' query parameter unspecified or unable to determine PID of current process."); -+ resp.getWriter() -+ .write("'pid' query parameter unspecified or unable to determine PID of current process."); - return; - } - -- final int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); -- final Output output = getOutput(req); -- final Event event = getEvent(req); -- final Long interval = getLong(req, "interval"); -- final Integer jstackDepth = getInteger(req, "jstackdepth", null); -- final Long bufsize = getLong(req, "bufsize"); -- final boolean thread = req.getParameterMap().containsKey("thread"); -- final boolean simple = req.getParameterMap().containsKey("simple"); -- final Integer width = getInteger(req, "width", null); -- final Integer height = getInteger(req, "height", null); -- final Double minwidth = getMinWidth(req); -- final boolean reverse = req.getParameterMap().containsKey("reverse"); -+ Event event = getEvent(req); -+ int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); -+ Long interval = getLong(req, "interval"); -+ Integer jstackDepth = getInteger(req, "jstackdepth", null); -+ boolean thread = req.getParameterMap().containsKey("thread"); -+ boolean simple = req.getParameterMap().containsKey("simple"); -+ boolean signature = req.getParameterMap().containsKey("signature"); -+ boolean annotate = req.getParameterMap().containsKey("annotate"); -+ boolean prependLib = req.getParameterMap().containsKey("prependlib"); -+ Output output = getOutput(req); -+ Double minwidth = getMinWidth(req); -+ boolean reverse = req.getParameterMap().containsKey("reverse"); - - if (process == null || !process.isAlive()) { - try { - int lockTimeoutSecs = 3; - if (profilerLock.tryLock(lockTimeoutSecs, TimeUnit.SECONDS)) { - try { -- File outputFile = new File(OUTPUT_DIR, "async-prof-pid-" + pid + "-" + -- event.name().toLowerCase() + "-" + ID_GEN.incrementAndGet() + "." + -- output.name().toLowerCase()); -+ File outputFile = -+ new File(OUTPUT_DIR, "async-prof-pid-" + pid + "-" + event.name().toLowerCase() + "-" -+ + ID_GEN.incrementAndGet() + "." 
+ output.name().toLowerCase()); -+ - List cmd = new ArrayList<>(); - cmd.add(asyncProfilerHome + PROFILER_SCRIPT); - cmd.add("-e"); - cmd.add(event.getInternalName()); - cmd.add("-d"); -- cmd.add("" + duration); -- cmd.add("-o"); -- cmd.add(output.name().toLowerCase()); -- cmd.add("-f"); -- cmd.add(outputFile.getAbsolutePath()); -+ cmd.add(String.valueOf(duration)); - if (interval != null) { - cmd.add("-i"); - cmd.add(interval.toString()); -@@ -238,24 +243,25 @@ - cmd.add("-j"); - cmd.add(jstackDepth.toString()); - } -- if (bufsize != null) { -- cmd.add("-b"); -- cmd.add(bufsize.toString()); -- } - if (thread) { - cmd.add("-t"); - } - if (simple) { - cmd.add("-s"); - } -- if (width != null) { -- cmd.add("--width"); -- cmd.add(width.toString()); -+ if (signature) { -+ cmd.add("-g"); - } -- if (height != null) { -- cmd.add("--height"); -- cmd.add(height.toString()); -+ if (annotate) { -+ cmd.add("-a"); - } -+ if (prependLib) { -+ cmd.add("-l"); -+ } -+ cmd.add("-o"); -+ cmd.add(output.name().toLowerCase()); -+ cmd.add("-f"); -+ cmd.add(outputFile.getAbsolutePath()); - if (minwidth != null) { - cmd.add("--minwidth"); - cmd.add(minwidth.toString()); -@@ -263,6 +269,7 @@ - if (reverse) { - cmd.add("--reverse"); - } -+ - cmd.add(pid.toString()); - process = ProcessUtils.runCmdAsync(cmd); - -@@ -270,11 +277,13 @@ - setResponseHeader(resp); - resp.setStatus(HttpServletResponse.SC_ACCEPTED); - String relativeUrl = "/prof-output-hbase/" + outputFile.getName(); -- resp.getWriter().write( -- "Started [" + event.getInternalName() + -- "] profiling. This page will automatically redirect to " + -- relativeUrl + " after " + duration + " seconds.\n\nCommand:\n" + -- Joiner.on(" ").join(cmd)); -+ resp.getWriter() -+ .write("Started [" + event.getInternalName() -+ + "] profiling. This page will automatically redirect to " + relativeUrl + " after " -+ + duration + " seconds. " -+ + "If empty diagram and Linux 4.6+, see 'Basic Usage' section on the Async " -+ + "Profiler Home Page, https://github.com/jvm-profiling-tools/async-profiler." -+ + "\n\nCommand:\n" + Joiner.on(" ").join(cmd)); - - // to avoid auto-refresh by ProfileOutputServlet, refreshDelay can be specified - // via url param -@@ -290,10 +299,11 @@ - } else { - setResponseHeader(resp); - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); -- resp.getWriter().write( -- "Unable to acquire lock. Another instance of profiler might be running."); -- LOG.warn("Unable to acquire lock in " + lockTimeoutSecs + -- " seconds. Another instance of profiler might be running."); -+ resp.getWriter() -+ .write("Unable to acquire lock. Another instance of profiler might be running."); -+ LOG.warn( -+ "Unable to acquire lock in {} seconds. 
Another instance of profiler might be running.", -+ lockTimeoutSecs); - } - } catch (InterruptedException e) { - LOG.warn("Interrupted while acquiring profile lock.", e); -@@ -306,9 +316,9 @@ - } - } - -- private Integer getInteger(final HttpServletRequest req, final String param, -- final Integer defaultValue) { -- final String value = req.getParameter(param); -+ private static Integer getInteger(final HttpServletRequest req, final String param, -+ final Integer defaultValue) { -+ String value = req.getParameter(param); - if (value != null) { - try { - return Integer.valueOf(value); -@@ -319,8 +329,8 @@ - return defaultValue; - } - -- private Long getLong(final HttpServletRequest req, final String param) { -- final String value = req.getParameter(param); -+ private static Long getLong(final HttpServletRequest req, final String param) { -+ String value = req.getParameter(param); - if (value != null) { - try { - return Long.valueOf(value); -@@ -331,8 +341,8 @@ - return null; - } - -- private Double getMinWidth(final HttpServletRequest req) { -- final String value = req.getParameter("minwidth"); -+ private static Double getMinWidth(final HttpServletRequest req) { -+ String value = req.getParameter("minwidth"); - if (value != null) { - try { - return Double.valueOf(value); -@@ -343,8 +353,8 @@ - return null; - } - -- private Event getEvent(final HttpServletRequest req) { -- final String eventArg = req.getParameter("event"); -+ private static Event getEvent(final HttpServletRequest req) { -+ String eventArg = req.getParameter("event"); - if (eventArg != null) { - Event event = Event.fromInternalName(eventArg); - return event == null ? Event.CPU : event; -@@ -352,16 +362,16 @@ - return Event.CPU; - } - -- private Output getOutput(final HttpServletRequest req) { -- final String outputArg = req.getParameter("output"); -+ private static Output getOutput(final HttpServletRequest req) { -+ String outputArg = req.getParameter("output"); - if (req.getParameter("output") != null) { - try { - return Output.valueOf(outputArg.trim().toUpperCase()); - } catch (IllegalArgumentException e) { -- return Output.SVG; -+ return Output.FLAMEGRAPH; - } - } -- return Output.SVG; -+ return Output.FLAMEGRAPH; - } - - static void setResponseHeader(final HttpServletResponse response) { -@@ -386,14 +396,14 @@ - - @Override - protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) -- throws IOException { -+ throws IOException { - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - setResponseHeader(resp); -- resp.getWriter().write("The profiler servlet was disabled at startup.\n\n" + -- "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" + -- "environment is properly configured. For more information please see\n" + -- "http://hbase.apache.org/book.html#profiler\n"); -- return; -+ resp.getWriter() -+ .write("The profiler servlet was disabled at startup.\n\n" -+ + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" -+ + "environment is properly configured. 
For more information please see\n" -+ + "https://hbase.apache.org/book.html#profiler\n"); - } - - } diff --git a/hbase/stackable/patches/2.4.17/001-HBASE-27860-2.4.17.patch b/hbase/stackable/patches/2.4.17/001-HBASE-27860-2.4.17.patch deleted file mode 100644 index 862f35335..000000000 --- a/hbase/stackable/patches/2.4.17/001-HBASE-27860-2.4.17.patch +++ /dev/null @@ -1,32 +0,0 @@ -From 4f22587d02311b91c8371d5e6b9a2758d39ab26e Mon Sep 17 00:00:00 2001 -From: Shuhei Yamasaki -Date: Tue, 9 May 2023 13:48:28 +0900 -Subject: [PATCH] Add packages for hadoop-3.3.5 - ---- - hbase-shaded/pom.xml | 12 ++++++++++++ - 1 file changed, 12 insertions(+) - -diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml -index d71b01184eeb..6477a16a877c 100644 ---- a/hbase-shaded/pom.xml -+++ b/hbase-shaded/pom.xml -@@ -129,6 +129,18 @@ - - - -+ -+ com.sun.istack -+ ${shaded.prefix}.com.sun.istack -+ -+ -+ com.sun.jersey -+ ${shaded.prefix}.com.sun.jersey -+ -+ -+ com.sun.xml -+ ${shaded.prefix}.com.sun.xml -+ - - com.cedarsoftware - ${shaded.prefix}.com.cedarsoftware diff --git a/hbase/stackable/patches/2.4.17/002-HBASE-25292-2.4.17.patch b/hbase/stackable/patches/2.4.17/002-HBASE-25292-2.4.17.patch deleted file mode 100644 index 85d87f2be..000000000 --- a/hbase/stackable/patches/2.4.17/002-HBASE-25292-2.4.17.patch +++ /dev/null @@ -1,1574 +0,0 @@ -From c5e6b4d0909693a5b9819e6b41b605c08c562778 Mon Sep 17 00:00:00 2001 -From: Andrew Purtell -Date: Sat, 28 Nov 2020 14:01:22 +0100 -Subject: [PATCH] HBASE-25292 Improve InetSocketAddress usage discipline - (#2669) - -Network identities should be bound late. Remote addresses should be -resolved at the last possible moment, just before connect(). Network -identity mappings can change, so our code should not inappropriately -cache them. Otherwise we might miss a change and fail to operate normally. - -Revert "HBASE-14544 Allow HConnectionImpl to not refresh the dns on errors" -Removes hbase.resolve.hostnames.on.failure and related code. We always -resolve hostnames, as late as possible. - -Preserve InetSocketAddress caching per RPC connection. Avoids potential -lookups per Call. - -Replace InetSocketAddress with Address where used as a map key. If we want -to key by hostname and/or resolved address we should be explicit about it. -Using Address chooses mapping by hostname and port only. - -Add metrics for potential nameservice resolution attempts, whenever an -InetSocketAddress is instantiated for connect; and metrics for failed -resolution, whenever InetSocketAddress#isUnresolved on the new instance -is true. - -* Use ServerName directly to build a stub key - -* Resolve and cache ISA on a RpcChannel as late as possible, at first call - -* Remove now invalid unit test TestCIBadHostname - -We resolve DNS at the latest possible time, at first call, and do not -resolve hostnames for creating stubs at all, so this unit test cannot -work now. 
- -Reviewed-by: Mingliang Liu -Signed-off-by: Duo Zhang ---- - .../hbase/client/AsyncConnectionImpl.java | 11 +- - .../client/ConnectionImplementation.java | 14 +-- - .../hadoop/hbase/client/ConnectionUtils.java | 28 ++--- - .../hbase/client/MetricsConnection.java | 16 +++ - .../hadoop/hbase/ipc/AbstractRpcClient.java | 116 +++++++++++------- - .../hbase/ipc/BlockingRpcConnection.java | 61 +++++---- - .../apache/hadoop/hbase/ipc/ConnectionId.java | 16 +-- - .../hadoop/hbase/ipc/FailedServers.java | 21 ++-- - .../org/apache/hadoop/hbase/ipc/IPCUtil.java | 6 +- - .../hadoop/hbase/ipc/NettyRpcConnection.java | 37 ++++-- - .../apache/hadoop/hbase/ipc/RpcClient.java | 8 +- - .../hadoop/hbase/ipc/RpcConnection.java | 17 +-- - .../hbase/ipc/ServerTooBusyException.java | 8 ++ - .../client/TestMasterRegistryHedgedReads.java | 6 +- - .../hadoop/hbase/ipc/TestConnectionId.java | 6 +- - .../hbase/ipc/TestFailedServersLog.java | 6 +- - .../apache/hadoop/hbase/ipc/TestIPCUtil.java | 4 +- - .../hbase/ipc/TestNettyRpcConnection.java | 4 +- - .../org/apache/hadoop/hbase/net/Address.java | 44 +++++-- - .../hbase/io/hfile/MemcachedBlockCache.java | 3 + - .../hbase/regionserver/HRegionServer.java | 36 +++--- - .../hbase/client/TestCIBadHostname.java | 97 --------------- - .../hbase/client/TestClientTimeouts.java | 17 ++- - .../hadoop/hbase/ipc/TestHBaseClient.java | 12 +- - .../apache/hadoop/hbase/zookeeper/ZKUtil.java | 7 +- - 25 files changed, 305 insertions(+), 296 deletions(-) - delete mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java - -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java -index 2e5d1813aa6..2bea6abe7a7 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java -@@ -28,6 +28,8 @@ import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLE - import static org.apache.hadoop.hbase.util.FutureUtils.addListener; - - import java.io.IOException; -+import java.net.InetAddress; -+import java.net.InetSocketAddress; - import java.util.Optional; - import java.util.concurrent.CompletableFuture; - import java.util.concurrent.ConcurrentHashMap; -@@ -77,8 +79,6 @@ class AsyncConnectionImpl implements AsyncConnection { - .setUncaughtExceptionHandler(Threads.LOGGING_EXCEPTION_HANDLER).build(), - 10, TimeUnit.MILLISECONDS); - -- private static final String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure"; -- - private final Configuration conf; - - final AsyncConnectionConfiguration connConf; -@@ -93,8 +93,6 @@ class AsyncConnectionImpl implements AsyncConnection { - - final RpcControllerFactory rpcControllerFactory; - -- private final boolean hostnameCanChange; -- - private final AsyncRegionLocator locator; - - final AsyncRpcRetryingCallerFactory callerFactory; -@@ -137,7 +135,6 @@ class AsyncConnectionImpl implements AsyncConnection { - } - this.rpcClient = RpcClientFactory.createClient(conf, clusterId, metrics.orElse(null)); - this.rpcControllerFactory = RpcControllerFactory.instantiate(conf); -- this.hostnameCanChange = conf.getBoolean(RESOLVE_HOSTNAME_ON_FAIL_KEY, true); - this.rpcTimeout = - (int) Math.min(Integer.MAX_VALUE, TimeUnit.NANOSECONDS.toMillis(connConf.getRpcTimeoutNs())); - this.locator = new AsyncRegionLocator(this, RETRY_TIMER); -@@ -258,7 +255,7 @@ class 
AsyncConnectionImpl implements AsyncConnection { - - ClientService.Interface getRegionServerStub(ServerName serverName) throws IOException { - return ConcurrentMapUtils.computeIfAbsentEx(rsStubs, -- getStubKey(ClientService.getDescriptor().getName(), serverName, hostnameCanChange), -+ getStubKey(ClientService.getDescriptor().getName(), serverName), - () -> createRegionServerStub(serverName)); - } - -@@ -272,7 +269,7 @@ class AsyncConnectionImpl implements AsyncConnection { - - AdminService.Interface getAdminStub(ServerName serverName) throws IOException { - return ConcurrentMapUtils.computeIfAbsentEx(adminSubs, -- getStubKey(AdminService.getDescriptor().getName(), serverName, hostnameCanChange), -+ getStubKey(AdminService.getDescriptor().getName(), serverName), - () -> createAdminServerStub(serverName)); - } - -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java -index 1b10775721f..efc6ecbc6f7 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java -@@ -165,9 +165,6 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - "hbase.client.master.state.cache.timeout.sec"; - private static final Logger LOG = LoggerFactory.getLogger(ConnectionImplementation.class); - -- private static final String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure"; -- -- private final boolean hostnamesCanChange; - private final long pause; - private final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified - // The mode tells if HedgedRead, LoadBalance mode is supported. 
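Illustrative sketch (not part of the patch): the hunks above drop the "hbase.resolve.hostnames.on.failure" switch and the hostnameCanChange flag, so RPC stub keys no longer embed an eagerly resolved IP address. Below is a minimal, self-contained Java sketch contrasting the two key schemes; the class name, method names, host, and port are hypothetical. The real replacement keys on the full ServerName (which also carries the start code) via the ConnectionUtils.getStubKey change further down in this patch; the sketch abbreviates that to host and port.

import java.net.InetAddress;
import java.net.UnknownHostException;

// Hypothetical sketch contrasting the stub-key schemes; not code from the patch.
public class StubKeySketch {

  // Old scheme: resolve the hostname eagerly and bake the IP into the key, so a
  // server that comes back under the same name with a new IP yields a new stub key.
  static String oldStubKey(String serviceName, String hostname, int port) {
    try {
      InetAddress ip = InetAddress.getByName(hostname); // eager DNS lookup
      return serviceName + "@" + hostname + "-" + ip.getHostAddress() + ":" + port;
    } catch (UnknownHostException e) {
      return serviceName + "@" + hostname + ":" + port; // unresolved fallback
    }
  }

  // New scheme: key on the logical identity only and defer DNS to connect time.
  static String newStubKey(String serviceName, String hostname, int port) {
    return String.format("%s@%s:%d", serviceName, hostname, port);
  }

  public static void main(String[] args) {
    System.out.println(oldStubKey("ClientService", "regionserver-0.example", 16020));
    System.out.println(newStubKey("ClientService", "regionserver-0.example", 16020));
  }
}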
-@@ -308,7 +305,6 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - - boolean shouldListen = - conf.getBoolean(HConstants.STATUS_PUBLISHED, HConstants.STATUS_PUBLISHED_DEFAULT); -- this.hostnamesCanChange = conf.getBoolean(RESOLVE_HOSTNAME_ON_FAIL_KEY, true); - Class listenerClass = - conf.getClass(ClusterStatusListener.STATUS_LISTENER_CLASS, - ClusterStatusListener.DEFAULT_STATUS_LISTENER_CLASS, ClusterStatusListener.Listener.class); -@@ -512,7 +508,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - throw new RegionServerStoppedException(masterServer + " is dead."); - } - String key = getStubKey(MasterProtos.HbckService.BlockingInterface.class.getName(), -- masterServer, this.hostnamesCanChange); -+ masterServer); - - return new HBaseHbck( - (MasterProtos.HbckService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> { -@@ -1301,8 +1297,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - throw new MasterNotRunningException(sn + " is dead."); - } - // Use the security info interface name as our stub key -- String key = -- getStubKey(MasterProtos.MasterService.getDescriptor().getName(), sn, hostnamesCanChange); -+ String key = getStubKey(MasterProtos.MasterService.getDescriptor().getName(), sn); - MasterProtos.MasterService.BlockingInterface stub = - (MasterProtos.MasterService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> { - BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(sn, user, rpcTimeout); -@@ -1350,8 +1345,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - if (isDeadServer(serverName)) { - throw new RegionServerStoppedException(serverName + " is dead."); - } -- String key = getStubKey(AdminProtos.AdminService.BlockingInterface.class.getName(), serverName, -- this.hostnamesCanChange); -+ String key = getStubKey(AdminProtos.AdminService.BlockingInterface.class.getName(), serverName); - return (AdminProtos.AdminService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> { - BlockingRpcChannel channel = - this.rpcClient.createBlockingRpcChannel(serverName, user, rpcTimeout); -@@ -1366,7 +1360,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { - throw new RegionServerStoppedException(serverName + " is dead."); - } - String key = getStubKey(ClientProtos.ClientService.BlockingInterface.class.getName(), -- serverName, this.hostnamesCanChange); -+ serverName); - return (ClientProtos.ClientService.BlockingInterface) computeIfAbsentEx(stubs, key, () -> { - BlockingRpcChannel channel = - this.rpcClient.createBlockingRpcChannel(serverName, user, rpcTimeout); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java -index d85b4399aff..7e19572fa59 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java -@@ -25,6 +25,7 @@ import static org.apache.hadoop.hbase.util.FutureUtils.addListener; - import java.io.IOException; - import java.lang.reflect.UndeclaredThrowableException; - import java.net.InetAddress; -+import java.net.InetSocketAddress; - import java.net.UnknownHostException; - import java.util.Arrays; - import java.util.List; -@@ -152,32 +153,17 @@ public final class ConnectionUtils { - } - - /** -- * Return retires + 1. The returned value will be in range [1, Integer.MAX_VALUE]. 
-+ * Get a unique key for the rpc stub to the given server. - */ -- static int retries2Attempts(int retries) { -- return Math.max(1, retries == Integer.MAX_VALUE ? Integer.MAX_VALUE : retries + 1); -+ static String getStubKey(String serviceName, ServerName serverName) { -+ return String.format("%s@%s", serviceName, serverName); - } - - /** -- * Get a unique key for the rpc stub to the given server. -+ * Return retires + 1. The returned value will be in range [1, Integer.MAX_VALUE]. - */ -- static String getStubKey(String serviceName, ServerName serverName, boolean hostnameCanChange) { -- // Sometimes, servers go down and they come back up with the same hostname but a different -- // IP address. Force a resolution of the rsHostname by trying to instantiate an -- // InetSocketAddress, and this way we will rightfully get a new stubKey. -- // Also, include the hostname in the key so as to take care of those cases where the -- // DNS name is different but IP address remains the same. -- String hostname = serverName.getHostname(); -- int port = serverName.getPort(); -- if (hostnameCanChange) { -- try { -- InetAddress ip = InetAddress.getByName(hostname); -- return serviceName + "@" + hostname + "-" + ip.getHostAddress() + ":" + port; -- } catch (UnknownHostException e) { -- LOG.warn("Can not resolve " + hostname + ", please check your network", e); -- } -- } -- return serviceName + "@" + hostname + ":" + port; -+ static int retries2Attempts(int retries) { -+ return Math.max(1, retries == Integer.MAX_VALUE ? Integer.MAX_VALUE : retries + 1); - } - - static void checkHasFamilies(Mutation mutation) { -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java -index 16a13df1e68..a9848a2a65c 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java -@@ -69,6 +69,8 @@ public class MetricsConnection implements StatisticTrackable { - private static final String HEAP_BASE = "heapOccupancy_"; - private static final String CACHE_BASE = "cacheDroppingExceptions_"; - private static final String UNKNOWN_EXCEPTION = "UnknownException"; -+ private static final String NS_LOOKUPS = "nsLookups"; -+ private static final String NS_LOOKUPS_FAILED = "nsLookupsFailed"; - private static final String CLIENT_SVC = ClientService.getDescriptor().getName(); - - /** A container class for collecting details about the RPC call as it percolates. 
*/
-@@ -293,6 +295,9 @@ public class MetricsConnection implements StatisticTrackable {
- protected final Timer userRegionLockWaitingTimer;
- protected final Timer userRegionLockHeldTimer;
- protected final Histogram userRegionLockQueueHist;
-+ protected final Counter nsLookups;
-+ protected final Counter nsLookupsFailed;
-+
-
- // dynamic metrics
-
-@@ -360,6 +365,9 @@ public class MetricsConnection implements StatisticTrackable {
- registry.timer(name(this.getClass(), "userRegionLockHeldDuration", scope));
- this.userRegionLockQueueHist =
- registry.histogram(name(MetricsConnection.class, "userRegionLockQueueLength", scope));
-+ this.nsLookups = registry.counter(name(this.getClass(), NS_LOOKUPS, scope));
-+ this.nsLookupsFailed = registry.counter(name(this.getClass(), NS_LOOKUPS_FAILED, scope));
-+
-
- this.reporter = JmxReporter.forRegistry(this.registry).build();
- this.reporter.start();
-@@ -564,4 +572,12 @@ public class MetricsConnection implements StatisticTrackable {
- CACHE_BASE + (exception == null ? UNKNOWN_EXCEPTION : exception.getClass().getSimpleName()),
- cacheDroppingExceptions, counterFactory).inc();
- }
-+
-+ public void incrNsLookups() {
-+ this.nsLookups.inc();
-+ }
-+
-+ public void incrNsLookupsFailed() {
-+ this.nsLookupsFailed.inc();
-+ }
- }
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-index 4e437909ab3..f7a041e52b8 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
-@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.ServerName;
- import org.apache.hadoop.hbase.client.MetricsConnection;
- import org.apache.hadoop.hbase.codec.Codec;
- import org.apache.hadoop.hbase.codec.KeyValueCodec;
-+import org.apache.hadoop.hbase.net.Address;
- import org.apache.hadoop.hbase.security.User;
- import org.apache.hadoop.hbase.security.UserProvider;
- import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-@@ -133,14 +134,13 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
-
- private int maxConcurrentCallsPerServer;
-
-- private static final LoadingCache<InetSocketAddress, AtomicInteger> concurrentCounterCache =
-- CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.HOURS)
-- .build(new CacheLoader<InetSocketAddress, AtomicInteger>() {
-- @Override
-- public AtomicInteger load(InetSocketAddress key) throws Exception {
-- return new AtomicInteger(0);
-- }
-- });
-+ private static final LoadingCache<Address, AtomicInteger> concurrentCounterCache =
-+ CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.HOURS).
-+ build(new CacheLoader<Address, AtomicInteger>() {
-+ @Override public AtomicInteger load(Address key) throws Exception {
-+ return new AtomicInteger(0);
-+ }
-+ });
-
- /**
- * Construct an IPC client for the cluster <code>clusterId</code>
-@@ -206,7 +206,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
- // The connection itself will disconnect if there is no pending call for maxIdleTime. 
- if (conn.getLastTouched() < closeBeforeTime && !conn.isActive()) { - if (LOG.isTraceEnabled()) { -- LOG.trace("Cleanup idle connection to {}", conn.remoteId().address); -+ LOG.trace("Cleanup idle connection to {}", conn.remoteId().getAddress()); - } - connections.remove(conn.remoteId(), conn); - conn.cleanupConnection(); -@@ -344,11 +344,11 @@ public abstract class AbstractRpcClient implements RpcC - private T getConnection(ConnectionId remoteId) throws IOException { - if (failedServers.isFailedServer(remoteId.getAddress())) { - if (LOG.isDebugEnabled()) { -- LOG.debug("Not trying to connect to " + remoteId.address -- + " this server is in the failed servers list"); -+ LOG.debug("Not trying to connect to " + remoteId.getAddress() -+ + " this server is in the failed servers list"); - } - throw new FailedServerException( -- "This server is in the failed servers list: " + remoteId.address); -+ "This server is in the failed servers list: " + remoteId.getAddress()); - } - T conn; - synchronized (connections) { -@@ -366,8 +366,8 @@ public abstract class AbstractRpcClient implements RpcC - */ - protected abstract T createConnection(ConnectionId remoteId) throws IOException; - -- private void onCallFinished(Call call, HBaseRpcController hrc, InetSocketAddress addr, -- RpcCallback callback) { -+ private void onCallFinished(Call call, HBaseRpcController hrc, Address addr, -+ RpcCallback callback) { - call.callStats.setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.getStartTime()); - if (metrics != null) { - metrics.updateRpc(call.md, call.param, call.callStats, call.error); -@@ -392,8 +392,8 @@ public abstract class AbstractRpcClient implements RpcC - } - - Call callMethod(final Descriptors.MethodDescriptor md, final HBaseRpcController hrc, -- final Message param, Message returnType, final User ticket, final InetSocketAddress addr, -- final RpcCallback callback) { -+ final Message param, Message returnType, final User ticket, -+ final InetSocketAddress inetAddr, final RpcCallback callback) { - final MetricsConnection.CallStats cs = MetricsConnection.newCallStats(); - cs.setStartTime(EnvironmentEdgeManager.currentTime()); - -@@ -407,6 +407,7 @@ public abstract class AbstractRpcClient implements RpcC - cs.setNumActionsPerServer(numActions); - } - -+ final Address addr = Address.fromSocketAddress(inetAddr); - final AtomicInteger counter = concurrentCounterCache.getUnchecked(addr); - Call call = new Call(nextCallId(), md, param, hrc.cellScanner(), returnType, - hrc.getCallTimeout(), hrc.getPriority(), new RpcCallback() { -@@ -431,12 +432,8 @@ public abstract class AbstractRpcClient implements RpcC - return call; - } - -- InetSocketAddress createAddr(ServerName sn) throws UnknownHostException { -- InetSocketAddress addr = new InetSocketAddress(sn.getHostname(), sn.getPort()); -- if (addr.isUnresolved()) { -- throw new UnknownHostException("can not resolve " + sn.getServerName()); -- } -- return addr; -+ private static Address createAddr(ServerName sn) { -+ return Address.fromParts(sn.getHostname(), sn.getPort()); - } - - /** -@@ -452,8 +449,8 @@ public abstract class AbstractRpcClient implements RpcC - for (T connection : connections.values()) { - ConnectionId remoteId = connection.remoteId(); - if ( -- remoteId.address.getPort() == sn.getPort() -- && remoteId.address.getHostName().equals(sn.getHostname()) -+ remoteId.getAddress().getPort() == sn.getPort() -+ && remoteId.getAddress().getHostname().equals(sn.getHostname()) - ) { - LOG.info("The server on " + sn.toString() + " is dead - stopping 
the connection " - + connection.remoteId); -@@ -514,19 +511,25 @@ public abstract class AbstractRpcClient implements RpcC - - @Override - public BlockingRpcChannel createBlockingRpcChannel(final ServerName sn, final User ticket, -- int rpcTimeout) throws UnknownHostException { -+ int rpcTimeout) { - return new BlockingRpcChannelImplementation(this, createAddr(sn), ticket, rpcTimeout); - } - - @Override -- public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws UnknownHostException { -+ public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) { - return new RpcChannelImplementation(this, createAddr(sn), user, rpcTimeout); - } - - private static class AbstractRpcChannel { - -- protected final InetSocketAddress addr; -+ protected final Address addr; -+ -+ // We cache the resolved InetSocketAddress for the channel so we do not do a DNS lookup -+ // per method call on the channel. If the remote target is removed or reprovisioned and -+ // its identity changes a new channel with a newly resolved InetSocketAddress will be -+ // created as part of retry, so caching here is fine. -+ // Normally, caching an InetSocketAddress is an anti-pattern. -+ protected InetSocketAddress isa; - - protected final AbstractRpcClient rpcClient; - -@@ -534,8 +537,8 @@ public abstract class AbstractRpcClient implements RpcC - - protected final int rpcTimeout; - -- protected AbstractRpcChannel(AbstractRpcClient rpcClient, InetSocketAddress addr, -- User ticket, int rpcTimeout) { -+ protected AbstractRpcChannel(AbstractRpcClient rpcClient, Address addr, -+ User ticket, int rpcTimeout) { - this.addr = addr; - this.rpcClient = rpcClient; - this.ticket = ticket; -@@ -571,15 +574,29 @@ public abstract class AbstractRpcClient implements RpcC - implements BlockingRpcChannel { - - protected BlockingRpcChannelImplementation(AbstractRpcClient rpcClient, -- InetSocketAddress addr, User ticket, int rpcTimeout) { -+ Address addr, User ticket, int rpcTimeout) { - super(rpcClient, addr, ticket, rpcTimeout); - } - - @Override - public Message callBlockingMethod(Descriptors.MethodDescriptor md, RpcController controller, -- Message param, Message returnType) throws ServiceException { -- return rpcClient.callBlockingMethod(md, configureRpcController(controller), param, returnType, -- ticket, addr); -+ Message param, Message returnType) throws ServiceException { -+ // Look up remote address upon first call -+ if (isa == null) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ isa = Address.toSocketAddress(addr); -+ if (isa.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ isa = null; -+ throw new ServiceException(new UnknownHostException(addr + " could not be resolved")); -+ } -+ } -+ return rpcClient.callBlockingMethod(md, configureRpcController(controller), -+ param, returnType, ticket, isa); - } - } - -@@ -588,20 +605,35 @@ public abstract class AbstractRpcClient implements RpcC - */ - public static class RpcChannelImplementation extends AbstractRpcChannel implements RpcChannel { - -- protected RpcChannelImplementation(AbstractRpcClient rpcClient, InetSocketAddress addr, -- User ticket, int rpcTimeout) throws UnknownHostException { -+ protected RpcChannelImplementation(AbstractRpcClient rpcClient, Address addr, -+ User ticket, int rpcTimeout) { - super(rpcClient, addr, ticket, rpcTimeout); - } - - @Override -- public void callMethod(Descriptors.MethodDescriptor md, RpcController 
controller, Message param, -- Message returnType, RpcCallback done) { -+ public void callMethod(Descriptors.MethodDescriptor md, RpcController controller, -+ Message param, Message returnType, RpcCallback done) { -+ HBaseRpcController configuredController = -+ configureRpcController(Preconditions.checkNotNull(controller, -+ "RpcController can not be null for async rpc call")); -+ // Look up remote address upon first call -+ if (isa == null || isa.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ isa = Address.toSocketAddress(addr); -+ if (isa.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ isa = null; -+ controller.setFailed(addr + " could not be resolved"); -+ return; -+ } -+ } - // This method does not throw any exceptions, so the caller must provide a - // HBaseRpcController which is used to pass the exceptions. -- this.rpcClient.callMethod(md, -- configureRpcController(Preconditions.checkNotNull(controller, -- "RpcController can not be null for async rpc call")), -- param, returnType, ticket, addr, done); -+ this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, isa, done); - } - } - } -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -index 60e2502524e..c623004f131 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -@@ -32,6 +32,7 @@ import java.io.IOException; - import java.io.InputStream; - import java.io.InterruptedIOException; - import java.io.OutputStream; -+import java.net.InetSocketAddress; - import java.net.Socket; - import java.net.SocketTimeoutException; - import java.net.UnknownHostException; -@@ -50,6 +51,7 @@ import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; - import org.apache.hadoop.hbase.io.ByteArrayOutputStream; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; - import org.apache.hadoop.hbase.log.HBaseMarkers; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; - import org.apache.hadoop.hbase.security.SaslUtil; - import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; -@@ -207,8 +209,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - * Cleans the call not yet sent when we finish. 
- */ - public void cleanup(IOException e) { -- IOException ie = -- new ConnectionClosingException("Connection to " + remoteId.address + " is closing."); -+ IOException ie = new ConnectionClosingException( -+ "Connection to " + remoteId.getAddress() + " is closing."); - for (Call call : callsToWrite) { - call.setException(ie); - } -@@ -218,12 +220,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - - BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId) throws IOException { - super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId, -- rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor); -+ rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor, -+ rpcClient.metrics); - this.rpcClient = rpcClient; -- if (remoteId.getAddress().isUnresolved()) { -- throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName()); -- } -- - this.connectionHeaderPreamble = getConnectionHeaderPreamble(); - ConnectionHeader header = getConnectionHeader(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize()); -@@ -258,7 +257,17 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (this.rpcClient.localAddr != null) { - this.socket.bind(this.rpcClient.localAddr); - } -- NetUtils.connect(this.socket, remoteId.getAddress(), this.rpcClient.connectTO); -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } -+ NetUtils.connect(this.socket, remoteAddr, this.rpcClient.connectTO); - this.socket.setSoTimeout(this.rpcClient.readTO); - return; - } catch (SocketTimeoutException toe) { -@@ -361,12 +370,22 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - } - - private boolean setupSaslConnection(final InputStream in2, final OutputStream out2) -- throws IOException { -- saslRpcClient = new HBaseSaslRpcClient(this.rpcClient.conf, provider, token, serverAddress, -- securityInfo, this.rpcClient.fallbackAllowed, -- this.rpcClient.conf.get("hbase.rpc.protection", -- QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), -- this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT)); -+ throws IOException { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookups(); -+ } -+ InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (serverAddr.isUnresolved()) { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } -+ saslRpcClient = new HBaseSaslRpcClient(this.rpcClient.conf, provider, token, -+ serverAddr.getAddress(), securityInfo, this.rpcClient.fallbackAllowed, -+ this.rpcClient.conf.get("hbase.rpc.protection", -+ QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), -+ this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT)); - return saslRpcClient.saslConnect(in2, out2); - } - -@@ -441,16 +460,16 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - - if 
(this.rpcClient.failedServers.isFailedServer(remoteId.getAddress())) { - if (LOG.isDebugEnabled()) { -- LOG.debug("Not trying to connect to " + remoteId.address -- + " this server is in the failed servers list"); -+ LOG.debug("Not trying to connect to " + remoteId.getAddress() -+ + " this server is in the failed servers list"); - } - throw new FailedServerException( -- "This server is in the failed servers list: " + remoteId.address); -+ "This server is in the failed servers list: " + remoteId.getAddress()); - } - - try { - if (LOG.isDebugEnabled()) { -- LOG.debug("Connecting to " + remoteId.address); -+ LOG.debug("Connecting to " + remoteId.getAddress()); - } - - short numRetries = 0; -@@ -505,14 +524,14 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - closeSocket(); - IOException e = ExceptionUtil.asInterrupt(t); - if (e == null) { -- this.rpcClient.failedServers.addToFailedServers(remoteId.address, t); -+ this.rpcClient.failedServers.addToFailedServers(remoteId.getAddress(), t); - if (t instanceof LinkageError) { - // probably the hbase hadoop version does not match the running hadoop version - e = new DoNotRetryIOException(t); - } else if (t instanceof IOException) { - e = (IOException) t; - } else { -- e = new IOException("Could not set up IO Streams to " + remoteId.address, t); -+ e = new IOException("Could not set up IO Streams to " + remoteId.getAddress(), t); - } - } - throw e; -@@ -770,7 +789,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (callSender != null) { - callSender.interrupt(); - } -- closeConn(new IOException("connection to " + remoteId.address + " closed")); -+ closeConn(new IOException("connection to " + remoteId.getAddress() + " closed")); - } - - @Override -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -index 48aec5cc9aa..3428ca16c74 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java -@@ -17,8 +17,9 @@ - */ - package org.apache.hadoop.hbase.ipc; - --import java.net.InetSocketAddress; - import java.util.Objects; -+ -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.yetus.audience.InterfaceAudience; - -@@ -31,9 +32,9 @@ class ConnectionId { - private static final int PRIME = 16777619; - final User ticket; - final String serviceName; -- final InetSocketAddress address; -+ final Address address; - -- public ConnectionId(User ticket, String serviceName, InetSocketAddress address) { -+ public ConnectionId(User ticket, String serviceName, Address address) { - this.address = address; - this.ticket = ticket; - this.serviceName = serviceName; -@@ -43,7 +44,7 @@ class ConnectionId { - return this.serviceName; - } - -- public InetSocketAddress getAddress() { -+ public Address getAddress() { - return address; - } - -@@ -72,8 +73,9 @@ class ConnectionId { - return hashCode(ticket, serviceName, address); - } - -- public static int hashCode(User ticket, String serviceName, InetSocketAddress address) { -- return (address.hashCode() -- + PRIME * (PRIME * serviceName.hashCode() ^ (ticket == null ? 0 : ticket.hashCode()))); -+ public static int hashCode(User ticket, String serviceName, Address address) { -+ return (address.hashCode() + -+ PRIME * (PRIME * serviceName.hashCode() ^ -+ (ticket == null ? 
0 : ticket.hashCode())));
- }
- }
-diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
-index b59d84d17f5..5f728bc5ed9 100644
---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
-+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java
-@@ -17,7 +17,6 @@
- */
- package org.apache.hadoop.hbase.ipc;
-
--import java.net.InetSocketAddress;
- import java.util.HashMap;
- import java.util.Map;
- import org.apache.hadoop.conf.Configuration;
-@@ -25,13 +24,15 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
- import org.apache.yetus.audience.InterfaceAudience;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
-+import org.apache.hadoop.hbase.net.Address;
-+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-
- /**
- * A class to manage a list of servers that failed recently.
- */
- @InterfaceAudience.Private
- public class FailedServers {
-- private final Map<String, Long> failedServers = new HashMap<String, Long>();
-+ private final Map<Address, Long> failedServers = new HashMap<Address, Long>();
- private long latestExpiry = 0;
- private final int recheckServersTimeout;
- private static final Logger LOG = LoggerFactory.getLogger(FailedServers.class);
-@@ -44,13 +45,14 @@ public class FailedServers {
- /**
- * Add an address to the list of the failed servers list.
- */
-- public synchronized void addToFailedServers(InetSocketAddress address, Throwable throwable) {
-+ public synchronized void addToFailedServers(Address address, Throwable throwable) {
- final long expiry = EnvironmentEdgeManager.currentTime() + recheckServersTimeout;
-- this.failedServers.put(address.toString(), expiry);
-+ this.failedServers.put(address, expiry);
- this.latestExpiry = expiry;
- if (LOG.isDebugEnabled()) {
-- LOG.debug("Added failed server with address " + address.toString() + " to list caused by "
-- + throwable.toString());
-+ LOG.debug(
-+ "Added failed server with address " + address + " to list caused by "
-+ + throwable.toString());
- }
- }
-
-@@ -58,7 +60,7 @@ public class FailedServers {
- * Check if the server should be considered as bad. Clean the old entries of the list. 
- * @return true if the server is in the failed servers list - */ -- public synchronized boolean isFailedServer(final InetSocketAddress address) { -+ public synchronized boolean isFailedServer(final Address address) { - if (failedServers.isEmpty()) { - return false; - } -@@ -67,15 +69,14 @@ public class FailedServers { - failedServers.clear(); - return false; - } -- String key = address.toString(); -- Long expiry = this.failedServers.get(key); -+ Long expiry = this.failedServers.get(address); - if (expiry == null) { - return false; - } - if (expiry >= now) { - return true; - } else { -- this.failedServers.remove(key); -+ this.failedServers.remove(address); - } - return false; - } -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -index 708d8d3e528..86be0584329 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -@@ -21,7 +21,6 @@ import java.io.IOException; - import java.io.OutputStream; - import java.lang.reflect.InvocationTargetException; - import java.net.ConnectException; --import java.net.InetSocketAddress; - import java.net.SocketTimeoutException; - import java.nio.channels.ClosedChannelException; - import java.util.concurrent.TimeoutException; -@@ -34,6 +33,7 @@ import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; - import org.apache.hadoop.hbase.exceptions.ConnectionClosedException; - import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; - import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.util.Bytes; - import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; - import org.apache.hadoop.ipc.RemoteException; -@@ -157,7 +157,7 @@ class IPCUtil { - } - } - -- private static String getCallTarget(InetSocketAddress addr, RegionInfo regionInfo) { -+ private static String getCallTarget(Address addr, RegionInfo regionInfo) { - return "address=" + addr - + (regionInfo != null ? 
", region=" + regionInfo.getRegionNameAsString() : ""); - } -@@ -179,7 +179,7 @@ class IPCUtil { - * @return an exception to throw - * @see ClientExceptionsUtil#isConnectionException(Throwable) - */ -- static IOException wrapException(InetSocketAddress addr, RegionInfo regionInfo, Throwable error) { -+ static IOException wrapException(Address addr, RegionInfo regionInfo, Throwable error) { - if (error instanceof ConnectException) { - // connection refused; include the host:port in the error - return (IOException) new ConnectException( -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -index 6a889678a5e..6a921503570 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -@@ -24,12 +24,15 @@ import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled; - import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; - - import java.io.IOException; -+import java.net.InetSocketAddress; -+import java.net.UnknownHostException; - import java.util.concurrent.Executors; - import java.util.concurrent.ScheduledExecutorService; - import java.util.concurrent.ThreadLocalRandom; - import java.util.concurrent.TimeUnit; - import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.NettyHBaseRpcConnectionHeaderHandler; - import org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler; - import org.apache.hadoop.hbase.security.SaslChallengeDecoder; -@@ -97,7 +100,8 @@ class NettyRpcConnection extends RpcConnection { - - NettyRpcConnection(NettyRpcClient rpcClient, ConnectionId remoteId) throws IOException { - super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId, -- rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor); -+ rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor, -+ rpcClient.metrics); - this.rpcClient = rpcClient; - this.eventLoop = rpcClient.group.next(); - byte[] connectionHeaderPreamble = getConnectionHeaderPreamble(); -@@ -206,8 +210,18 @@ class NettyRpcConnection extends RpcConnection { - Promise saslPromise = ch.eventLoop().newPromise(); - final NettyHBaseSaslRpcClientHandler saslHandler; - try { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookups(); -+ } -+ InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (serverAddr.isUnresolved()) { -+ if (this.metrics != null) { -+ this.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } - saslHandler = new NettyHBaseSaslRpcClientHandler(saslPromise, ticket, provider, token, -- serverAddress, securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); -+ serverAddr.getAddress(), securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); - } catch (IOException e) { - failInit(ch, e); - return; -@@ -268,10 +282,19 @@ class NettyRpcConnection extends RpcConnection { - }); - } - -- private void connect() { -+ private void connect() throws UnknownHostException { - assert eventLoop.inEventLoop(); -- LOG.trace("Connecting to {}", remoteId.address); -- -+ LOG.trace("Connecting to {}", remoteId.getAddress()); -+ 
if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (this.rpcClient.metrics != null) { -+ this.rpcClient.metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } - this.channel = new Bootstrap().group(eventLoop).channel(rpcClient.channelClass) - .option(ChannelOption.TCP_NODELAY, rpcClient.isTcpNoDelay()) - .option(ChannelOption.SO_KEEPALIVE, rpcClient.tcpKeepAlive) -@@ -283,7 +306,7 @@ class NettyRpcConnection extends RpcConnection { - ch.pipeline().addLast(BufferCallBeforeInitHandler.NAME, - new BufferCallBeforeInitHandler()); - } -- }).localAddress(rpcClient.localAddr).remoteAddress(remoteId.address).connect() -+ }).localAddress(rpcClient.localAddr).remoteAddress(remoteAddr).connect() - .addListener(new ChannelFutureListener() { - - @Override -@@ -294,7 +317,7 @@ class NettyRpcConnection extends RpcConnection { - LOG.warn( - "Exception encountered while connecting to the server " + remoteId.getAddress(), ex); - failInit(ch, ex); -- rpcClient.failedServers.addToFailedServers(remoteId.address, future.cause()); -+ rpcClient.failedServers.addToFailedServers(remoteId.getAddress(), future.cause()); - return; - } - ch.writeAndFlush(connectionHeaderPreamble.retainedDuplicate()); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -index 1b5f3e0a0eb..6ecff49e52b 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java -@@ -18,7 +18,6 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.Closeable; --import java.io.IOException; - import org.apache.hadoop.hbase.ServerName; - import org.apache.hadoop.hbase.security.User; - import org.apache.yetus.audience.InterfaceAudience; -@@ -62,10 +61,8 @@ public interface RpcClient extends Closeable { - * @param user which is to use the connection - * @param rpcTimeout default rpc operation timeout - * @return A blocking rpc channel that goes via this rpc client instance. -- * @throws IOException when channel could not be created - */ -- BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException; -+ BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout); - - /** - * Creates a "channel" that can be used by a protobuf service. Useful setting up protobuf stubs. -@@ -74,8 +71,7 @@ public interface RpcClient extends Closeable { - * @param rpcTimeout default rpc operation timeout - * @return A rpc channel that goes via this rpc client instance. - */ -- RpcChannel createRpcChannel(final ServerName sn, final User user, int rpcTimeout) -- throws IOException; -+ RpcChannel createRpcChannel(final ServerName sn, final User user, int rpcTimeout); - - /** - * Interrupt the connections to the given server. 
This should be called if the server is known as -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -index 28a628e0fcf..a3e2bef30c8 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -@@ -18,11 +18,10 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.IOException; --import java.net.InetAddress; --import java.net.UnknownHostException; - import java.util.concurrent.TimeUnit; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HConstants; -+import org.apache.hadoop.hbase.client.MetricsConnection; - import org.apache.hadoop.hbase.codec.Codec; - import org.apache.hadoop.hbase.security.SecurityInfo; - import org.apache.hadoop.hbase.security.User; -@@ -59,8 +58,6 @@ abstract class RpcConnection { - - protected final Token token; - -- protected final InetAddress serverAddress; -- - protected final SecurityInfo securityInfo; - - protected final int reloginMaxBackoff; // max pause before relogin on sasl failure -@@ -69,6 +66,8 @@ abstract class RpcConnection { - - protected final CompressionCodec compressor; - -+ protected final MetricsConnection metrics; -+ - protected final HashedWheelTimer timeoutTimer; - - protected final Configuration conf; -@@ -83,17 +82,13 @@ abstract class RpcConnection { - protected SaslClientAuthenticationProvider provider; - - protected RpcConnection(Configuration conf, HashedWheelTimer timeoutTimer, ConnectionId remoteId, -- String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec compressor) -- throws IOException { -- if (remoteId.getAddress().isUnresolved()) { -- throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName()); -- } -- this.serverAddress = remoteId.getAddress().getAddress(); -+ String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec compressor, -+ MetricsConnection metrics) throws IOException { - this.timeoutTimer = timeoutTimer; - this.codec = codec; - this.compressor = compressor; - this.conf = conf; -- -+ this.metrics = metrics; - User ticket = remoteId.getTicket(); - this.securityInfo = SecurityInfo.getInfo(remoteId.getServiceName()); - this.useSasl = isSecurityEnabled; -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -index 6f6bbf67c54..6c22ca94e42 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java -@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.ipc; - - import java.net.InetSocketAddress; - import org.apache.hadoop.hbase.DoNotRetryIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.yetus.audience.InterfaceAudience; - - /** -@@ -27,7 +28,14 @@ import org.apache.yetus.audience.InterfaceAudience; - @SuppressWarnings("serial") - @InterfaceAudience.Public - public class ServerTooBusyException extends DoNotRetryIOException { -+ -+ public ServerTooBusyException(Address address, long count) { -+ super("Busy Server! " + count + " concurrent RPCs against " + address); -+ } -+ -+ @Deprecated - public ServerTooBusyException(InetSocketAddress address, long count) { - super("Busy Server! 
" + count + " concurrent RPCs against " + address); - } -+ - } -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -index 0b307bf2e56..2ff6bd640eb 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMasterRegistryHedgedReads.java -@@ -90,14 +90,12 @@ public class TestMasterRegistryHedgedReads { - } - - @Override -- public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException { -+ public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout) { - throw new UnsupportedOperationException(); - } - - @Override -- public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) -- throws IOException { -+ public RpcChannel createRpcChannel(ServerName sn, User user, int rpcTimeout) { - return new RpcChannelImpl(); - } - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -index 9b55ecf1743..da962cac0d3 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestConnectionId.java -@@ -22,10 +22,10 @@ import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertNotEquals; - import static org.junit.Assert.assertTrue; - --import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.HBaseConfiguration; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; -@@ -45,7 +45,7 @@ public class TestConnectionId { - private User testUser2 = - User.createUserForTesting(testConfig, "test", new String[] { "testgroup" }); - private String serviceName = "test"; -- private InetSocketAddress address = new InetSocketAddress(999); -+ private Address address = Address.fromParts("localhost", 999); - private ConnectionId connectionId1 = new ConnectionId(testUser1, serviceName, address); - private ConnectionId connectionId2 = new ConnectionId(testUser2, serviceName, address); - -@@ -68,7 +68,7 @@ public class TestConnectionId { - - @Test - public void testToString() { -- String expectedString = "0.0.0.0/0.0.0.0:999/test/test (auth:SIMPLE)"; -+ String expectedString = "localhost:999/test/test (auth:SIMPLE)"; - assertEquals(expectedString, connectionId1.toString()); - } - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -index 3998fa0880f..e119861a30d 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java -@@ -21,9 +21,9 @@ import static org.hamcrest.CoreMatchers.is; - import static org.hamcrest.MatcherAssert.assertThat; - import static org.junit.Assert.assertEquals; - --import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; 
-+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.log4j.Appender; -@@ -51,7 +51,7 @@ public class TestFailedServersLog { - HBaseClassTestRule.forClass(TestFailedServersLog.class); - - static final int TEST_PORT = 9999; -- private InetSocketAddress addr; -+ private Address addr; - - @Mock - private Appender mockAppender; -@@ -74,7 +74,7 @@ public class TestFailedServersLog { - Throwable nullException = new NullPointerException(); - - FailedServers fs = new FailedServers(new Configuration()); -- addr = new InetSocketAddress(TEST_PORT); -+ addr = Address.fromParts("localhost", TEST_PORT); - - fs.addToFailedServers(addr, nullException); - -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -index 8bdef8eb4a3..c327896f72a 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java -@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue; - import java.io.IOException; - import java.lang.reflect.Constructor; - import java.lang.reflect.InvocationTargetException; --import java.net.InetSocketAddress; - import java.util.ArrayList; - import java.util.List; - import java.util.concurrent.CompletableFuture; -@@ -34,6 +33,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.client.RegionInfoBuilder; - import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; - import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.hadoop.hbase.util.FutureUtils; -@@ -101,7 +101,7 @@ public class TestIPCUtil { - for (Class clazz : ClientExceptionsUtil.getConnectionExceptionTypes()) { - exceptions.add(create(clazz)); - } -- InetSocketAddress addr = InetSocketAddress.createUnresolved("127.0.0.1", 12345); -+ Address addr = Address.fromParts("127.0.0.1", 12345); - for (Throwable exception : exceptions) { - if (exception instanceof TimeoutException) { - assertThat(IPCUtil.wrapException(addr, null, exception), -diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -index 6d543573de7..a9c40fd3bb7 100644 ---- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java -@@ -26,9 +26,9 @@ import java.io.IOException; - import java.lang.reflect.InvocationTargetException; - import java.lang.reflect.Method; - import java.lang.reflect.Modifier; --import java.net.InetSocketAddress; - import org.apache.hadoop.hbase.HBaseClassTestRule; - import org.apache.hadoop.hbase.HBaseConfiguration; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; -@@ -59,7 +59,7 @@ public class TestNettyRpcConnection { - public static void setUp() throws IOException { - CLIENT = new NettyRpcClient(HBaseConfiguration.create()); - CONN = new NettyRpcConnection(CLIENT, 
-- new ConnectionId(User.getCurrent(), "test", new InetSocketAddress("localhost", 1234))); -+ new ConnectionId(User.getCurrent(), "test", Address.fromParts("localhost", 1234))); - } - - @AfterClass -diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -index 0d5670965f7..7c9f41c9c71 100644 ---- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java -@@ -17,6 +17,8 @@ - */ - package org.apache.hadoop.hbase.net; - -+import java.net.InetSocketAddress; -+ - import org.apache.commons.lang3.StringUtils; - import org.apache.yetus.audience.InterfaceAudience; - -@@ -33,7 +35,7 @@ import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort; - */ - @InterfaceAudience.Public - public class Address implements Comparable
{ -- private HostAndPort hostAndPort; -+ private final HostAndPort hostAndPort; - - private Address(HostAndPort hostAndPort) { - this.hostAndPort = hostAndPort; -@@ -47,6 +49,33 @@ public class Address implements Comparable
{ - return new Address(HostAndPort.fromString(hostnameAndPort)); - } - -+ public static Address fromSocketAddress(InetSocketAddress addr) { -+ return Address.fromParts(addr.getHostString(), addr.getPort()); -+ } -+ -+ public static InetSocketAddress toSocketAddress(Address addr) { -+ return new InetSocketAddress(addr.getHostName(), addr.getPort()); -+ } -+ -+ public static InetSocketAddress[] toSocketAddress(Address[] addrs) { -+ if (addrs == null) { -+ return null; -+ } -+ InetSocketAddress[] result = new InetSocketAddress[addrs.length]; -+ for (int i = 0; i < addrs.length; i++) { -+ result[i] = toSocketAddress(addrs[i]); -+ } -+ return result; -+ } -+ -+ public String getHostName() { -+ return this.hostAndPort.getHost(); -+ } -+ -+ /** -+ * @deprecated Use {@link #getHostName()} instead -+ */ -+ @Deprecated - public String getHostname() { - return this.hostAndPort.getHost(); - } -@@ -66,8 +95,8 @@ public class Address implements Comparable
{ - * otherwise returns same as {@link #toString()}} - */ - public String toStringWithoutDomain() { -- String hostname = getHostname(); -- String[] parts = hostname.split("\\."); -+ String hostname = getHostName(); -+ String [] parts = hostname.split("\\."); - if (parts.length > 1) { - for (String part : parts) { - if (!StringUtils.isNumeric(part)) { -@@ -86,20 +115,21 @@ public class Address implements Comparable
{ - return true; - } - if (other instanceof Address) { -- Address that = (Address) other; -- return this.getHostname().equals(that.getHostname()) && this.getPort() == that.getPort(); -+ Address that = (Address)other; -+ return this.getHostName().equals(that.getHostName()) && -+ this.getPort() == that.getPort(); - } - return false; - } - - @Override - public int hashCode() { -- return this.getHostname().hashCode() ^ getPort(); -+ return this.getHostName().hashCode() ^ getPort(); - } - - @Override - public int compareTo(Address that) { -- int compare = this.getHostname().compareTo(that.getHostname()); -+ int compare = this.getHostName().compareTo(that.getHostName()); - if (compare != 0) { - return compare; - } -diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -index 3077ddb15f1..0003a451d72 100644 ---- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java -@@ -107,6 +107,9 @@ public class MemcachedBlockCache implements BlockCache { - // case. - String serverListString = c.get(MEMCACHED_CONFIG_KEY, "localhost:11211"); - String[] servers = serverListString.split(","); -+ // MemcachedClient requires InetSocketAddresses, we have to create them now. Implies any -+ // resolved identities cannot have their address mappings changed while the MemcachedClient -+ // instance is alive. We won't get a chance to trigger re-resolution. - List serverAddresses = new ArrayList<>(servers.length); - for (String s : servers) { - serverAddresses.add(Addressing.createInetSocketAddressFromHostAndPortStr(s)); -diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -index c045ff87ded..8cb07f6c146 100644 ---- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java -@@ -129,6 +129,7 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; - import org.apache.hadoop.hbase.mob.MobFileCache; - import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; - import org.apache.hadoop.hbase.namequeues.SlowLogTableOpsChore; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.procedure.RegionServerProcedureManagerHost; - import org.apache.hadoop.hbase.procedure2.RSProcedureCallable; - import org.apache.hadoop.hbase.quotas.FileSystemUtilizationChore; -@@ -317,14 +318,17 @@ public class HRegionServer extends Thread - private final ReentrantReadWriteLock onlineRegionsLock = new ReentrantReadWriteLock(); - - /** -- * Map of encoded region names to the DataNode locations they should be hosted on We store the -- * value as InetSocketAddress since this is used only in HDFS API (create() that takes favored -- * nodes as hints for placing file blocks). We could have used ServerName here as the value class, -- * but we'd need to convert it to InetSocketAddress at some point before the HDFS API call, and it -- * seems a bit weird to store ServerName since ServerName refers to RegionServers and here we -- * really mean DataNode locations. 
-+ * Map of encoded region names to the DataNode locations they should be hosted on -+ * We store the value as Address since InetSocketAddress is required by the HDFS -+ * API (create() that takes favored nodes as hints for placing file blocks). -+ * We could have used ServerName here as the value class, but we'd need to -+ * convert it to InetSocketAddress at some point before the HDFS API call, and -+ * it seems a bit weird to store ServerName since ServerName refers to RegionServers -+ * and here we really mean DataNode locations. We don't store it as InetSocketAddress -+ * here because the conversion on demand from Address to InetSocketAddress will -+ * guarantee the resolution results will be fresh when we need it. - */ -- private final Map regionFavoredNodesMap = new ConcurrentHashMap<>(); -+ private final Map regionFavoredNodesMap = new ConcurrentHashMap<>(); - - private LeaseManager leaseManager; - -@@ -3453,25 +3457,27 @@ public class HRegionServer extends Thread - - @Override - public void updateRegionFavoredNodesMapping(String encodedRegionName, -- List favoredNodes) { -- InetSocketAddress[] addr = new InetSocketAddress[favoredNodes.size()]; -+ List favoredNodes) { -+ Address[] addr = new Address[favoredNodes.size()]; - // Refer to the comment on the declaration of regionFavoredNodesMap on why -- // it is a map of region name to InetSocketAddress[] -+ // it is a map of region name to Address[] - for (int i = 0; i < favoredNodes.size(); i++) { -- addr[i] = InetSocketAddress.createUnresolved(favoredNodes.get(i).getHostName(), -- favoredNodes.get(i).getPort()); -+ addr[i] = Address.fromParts(favoredNodes.get(i).getHostName(), -+ favoredNodes.get(i).getPort()); - } - regionFavoredNodesMap.put(encodedRegionName, addr); - } - - /** -- * Return the favored nodes for a region given its encoded name. Look at the comment around -- * {@link #regionFavoredNodesMap} on why it is InetSocketAddress[] -+ * Return the favored nodes for a region given its encoded name. Look at the -+ * comment around {@link #regionFavoredNodesMap} on why we convert to InetSocketAddress[] -+ * here. -+ * @param encodedRegionName - * @return array of favored locations - */ - @Override - public InetSocketAddress[] getFavoredNodesForRegion(String encodedRegionName) { -- return regionFavoredNodesMap.get(encodedRegionName); -+ return Address.toSocketAddress(regionFavoredNodesMap.get(encodedRegionName)); - } - - @Override -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java -deleted file mode 100644 -index c148efe2fef..00000000000 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIBadHostname.java -+++ /dev/null -@@ -1,97 +0,0 @@ --/* -- * Licensed to the Apache Software Foundation (ASF) under one -- * or more contributor license agreements. See the NOTICE file -- * distributed with this work for additional information -- * regarding copyright ownership. The ASF licenses this file -- * to you under the Apache License, Version 2.0 (the -- * "License"); you may not use this file except in compliance -- * with the License. You may obtain a copy of the License at -- * -- * http://www.apache.org/licenses/LICENSE-2.0 -- * -- * Unless required by applicable law or agreed to in writing, software -- * distributed under the License is distributed on an "AS IS" BASIS, -- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-- * See the License for the specific language governing permissions and -- * limitations under the License. -- */ --package org.apache.hadoop.hbase.client; -- --import static org.junit.Assert.fail; -- --import java.net.UnknownHostException; --import org.apache.hadoop.hbase.HBaseClassTestRule; --import org.apache.hadoop.hbase.HBaseTestingUtility; --import org.apache.hadoop.hbase.HConstants; --import org.apache.hadoop.hbase.ServerName; --import org.apache.hadoop.hbase.testclassification.ClientTests; --import org.apache.hadoop.hbase.testclassification.MediumTests; --import org.junit.AfterClass; --import org.junit.BeforeClass; --import org.junit.ClassRule; --import org.junit.Test; --import org.junit.experimental.categories.Category; -- --/** -- * Tests that we fail fast when hostname resolution is not working and do not cache unresolved -- * InetSocketAddresses. -- */ --@Category({ MediumTests.class, ClientTests.class }) --public class TestCIBadHostname { -- -- @ClassRule -- public static final HBaseClassTestRule CLASS_RULE = -- HBaseClassTestRule.forClass(TestCIBadHostname.class); -- -- private static HBaseTestingUtility testUtil; -- private static ConnectionImplementation conn; -- -- @BeforeClass -- public static void setupBeforeClass() throws Exception { -- testUtil = HBaseTestingUtility.createLocalHTU(); -- testUtil.startMiniCluster(); -- conn = (ConnectionImplementation) testUtil.getConnection(); -- } -- -- @AfterClass -- public static void teardownAfterClass() throws Exception { -- conn.close(); -- testUtil.shutdownMiniCluster(); -- } -- -- @Test(expected = UnknownHostException.class) -- public void testGetAdminBadHostname() throws Exception { -- // verify that we can get an instance with the cluster hostname -- ServerName master = testUtil.getHBaseCluster().getMaster().getServerName(); -- try { -- conn.getAdmin(master); -- } catch (UnknownHostException uhe) { -- fail("Obtaining admin to the cluster master should have succeeded"); -- } -- -- // test that we fail to get a client to an unresolvable hostname, which -- // means it won't be cached -- ServerName badHost = ServerName.valueOf("unknownhost.invalid:" + HConstants.DEFAULT_MASTER_PORT, -- System.currentTimeMillis()); -- conn.getAdmin(badHost); -- fail("Obtaining admin to unresolvable hostname should have failed"); -- } -- -- @Test(expected = UnknownHostException.class) -- public void testGetClientBadHostname() throws Exception { -- // verify that we can get an instance with the cluster hostname -- ServerName rs = testUtil.getHBaseCluster().getRegionServer(0).getServerName(); -- try { -- conn.getClient(rs); -- } catch (UnknownHostException uhe) { -- fail("Obtaining client to the cluster regionserver should have succeeded"); -- } -- -- // test that we fail to get a client to an unresolvable hostname, which -- // means it won't be cached -- ServerName badHost = ServerName.valueOf( -- "unknownhost.invalid:" + HConstants.DEFAULT_REGIONSERVER_PORT, System.currentTimeMillis()); -- conn.getAdmin(badHost); -- fail("Obtaining client to unresolvable hostname should have failed"); -- } --} -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -index 86b981276f6..8a5a0d1cc84 100644 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java -@@ -20,10 +20,8 @@ package org.apache.hadoop.hbase.client; - 
import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertTrue; - --import java.net.InetSocketAddress; - import java.net.SocketAddress; - import java.net.SocketTimeoutException; --import java.net.UnknownHostException; - import java.util.Random; - import java.util.concurrent.ThreadLocalRandom; - import java.util.concurrent.atomic.AtomicInteger; -@@ -38,6 +36,7 @@ import org.apache.hadoop.hbase.ipc.AbstractRpcClient; - import org.apache.hadoop.hbase.ipc.BlockingRpcClient; - import org.apache.hadoop.hbase.ipc.HBaseRpcController; - import org.apache.hadoop.hbase.ipc.RpcClientFactory; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.testclassification.ClientTests; - import org.apache.hadoop.hbase.testclassification.MediumTests; -@@ -145,14 +144,12 @@ public class TestClientTimeouts { - - // Return my own instance, one that does random timeouts - @Override -- public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User ticket, int rpcTimeout) -- throws UnknownHostException { -+ public BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User ticket, int rpcTimeout) { - return new RandomTimeoutBlockingRpcChannel(this, sn, ticket, rpcTimeout); - } - - @Override -- public RpcChannel createRpcChannel(ServerName sn, User ticket, int rpcTimeout) -- throws UnknownHostException { -+ public RpcChannel createRpcChannel(ServerName sn, User ticket, int rpcTimeout) { - return new RandomTimeoutRpcChannel(this, sn, ticket, rpcTimeout); - } - } -@@ -167,8 +164,8 @@ public class TestClientTimeouts { - private static AtomicInteger invokations = new AtomicInteger(); - - RandomTimeoutBlockingRpcChannel(final BlockingRpcClient rpcClient, final ServerName sn, -- final User ticket, final int rpcTimeout) { -- super(rpcClient, new InetSocketAddress(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); -+ final User ticket, final int rpcTimeout) { -+ super(rpcClient, Address.fromParts(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); - } - - @Override -@@ -188,8 +185,8 @@ public class TestClientTimeouts { - private static class RandomTimeoutRpcChannel extends AbstractRpcClient.RpcChannelImplementation { - - RandomTimeoutRpcChannel(AbstractRpcClient rpcClient, ServerName sn, User ticket, -- int rpcTimeout) throws UnknownHostException { -- super(rpcClient, new InetSocketAddress(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); -+ int rpcTimeout) { -+ super(rpcClient, Address.fromParts(sn.getHostname(), sn.getPort()), ticket, rpcTimeout); - } - - @Override -diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -index 843b7dfa354..9a7af2d490c 100644 ---- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java -@@ -17,9 +17,9 @@ - */ - package org.apache.hadoop.hbase.ipc; - --import java.net.InetSocketAddress; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HBaseClassTestRule; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.testclassification.RPCTests; - import org.apache.hadoop.hbase.testclassification.SmallTests; - import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -@@ -42,11 +42,11 @@ public class TestHBaseClient { - FailedServers fs = new FailedServers(new Configuration()); - Throwable testThrowable = new 
Throwable();// throwable already tested in TestFailedServers.java - -- InetSocketAddress ia = InetSocketAddress.createUnresolved("bad", 12); -- // same server as ia -- InetSocketAddress ia2 = InetSocketAddress.createUnresolved("bad", 12); -- InetSocketAddress ia3 = InetSocketAddress.createUnresolved("badtoo", 12); -- InetSocketAddress ia4 = InetSocketAddress.createUnresolved("badtoo", 13); -+ Address ia = Address.fromParts("bad", 12); -+ // same server as ia -+ Address ia2 = Address.fromParts("bad", 12); -+ Address ia3 = Address.fromParts("badtoo", 12); -+ Address ia4 = Address.fromParts("badtoo", 13); - - Assert.assertFalse(fs.isFailedServer(ia)); - -diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -index cd9d829b60c..c2b6dac1086 100644 ---- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java -@@ -25,6 +25,7 @@ import java.io.OutputStreamWriter; - import java.io.PrintWriter; - import java.net.InetSocketAddress; - import java.net.Socket; -+import java.net.UnknownHostException; - import java.nio.charset.StandardCharsets; - import java.util.ArrayList; - import java.util.Arrays; -@@ -1633,10 +1634,12 @@ public final class ZKUtil { - String host = sp[0]; - int port = sp.length > 1 ? Integer.parseInt(sp[1]) : HConstants.DEFAULT_ZOOKEEPER_CLIENT_PORT; - -- InetSocketAddress sockAddr = new InetSocketAddress(host, port); - try (Socket socket = new Socket()) { -+ InetSocketAddress sockAddr = new InetSocketAddress(host, port); -+ if (sockAddr.isUnresolved()) { -+ throw new UnknownHostException(host + " cannot be resolved"); -+ } - socket.connect(sockAddr, timeout); -- - socket.setSoTimeout(timeout); - try ( - PrintWriter out = new PrintWriter(new BufferedWriter( --- -2.42.1 - diff --git a/hbase/stackable/patches/2.4.17/003-HBASE-25336-2.4.17.patch b/hbase/stackable/patches/2.4.17/003-HBASE-25336-2.4.17.patch deleted file mode 100644 index 6601057e2..000000000 --- a/hbase/stackable/patches/2.4.17/003-HBASE-25336-2.4.17.patch +++ /dev/null @@ -1,307 +0,0 @@ -From f14d090f652a81deb72672f03f85af2bdf750548 Mon Sep 17 00:00:00 2001 -From: Duo Zhang -Date: Mon, 7 Dec 2020 21:49:04 +0800 -Subject: [PATCH] HBASE-25336 Use Address instead of InetSocketAddress in - RpcClient implementation (#2716) - -Signed-off-by: Guanghao Zhang ---- - .../hadoop/hbase/ipc/AbstractRpcClient.java | 60 ++++--------------- - .../hbase/ipc/BlockingRpcConnection.java | 22 +------ - .../hadoop/hbase/ipc/NettyRpcConnection.java | 25 +------- - .../hadoop/hbase/ipc/RpcConnection.java | 24 +++++++- - 4 files changed, 36 insertions(+), 95 deletions(-) - -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -index f7a041e52b..de00963bca 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java -@@ -21,9 +21,7 @@ import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; - import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException; - - import java.io.IOException; --import java.net.InetSocketAddress; - import java.net.SocketAddress; --import java.net.UnknownHostException; - import java.util.Collection; - import java.util.concurrent.Executors; - import 
java.util.concurrent.ScheduledExecutorService; -@@ -320,7 +318,7 @@ public abstract class AbstractRpcClient implements RpcC - * @return A pair with the Message response and the Cell data (if any). - */ - private Message callBlockingMethod(Descriptors.MethodDescriptor md, HBaseRpcController hrc, -- Message param, Message returnType, final User ticket, final InetSocketAddress isa) -+ Message param, Message returnType, final User ticket, final Address isa) - throws ServiceException { - BlockingRpcCallback done = new BlockingRpcCallback<>(); - callMethod(md, hrc, param, returnType, ticket, isa, done); -@@ -393,7 +391,7 @@ public abstract class AbstractRpcClient implements RpcC - - Call callMethod(final Descriptors.MethodDescriptor md, final HBaseRpcController hrc, - final Message param, Message returnType, final User ticket, -- final InetSocketAddress inetAddr, final RpcCallback callback) { -+ final Address addr, final RpcCallback callback) { - final MetricsConnection.CallStats cs = MetricsConnection.newCallStats(); - cs.setStartTime(EnvironmentEdgeManager.currentTime()); - -@@ -407,7 +405,6 @@ public abstract class AbstractRpcClient implements RpcC - cs.setNumActionsPerServer(numActions); - } - -- final Address addr = Address.fromSocketAddress(inetAddr); - final AtomicInteger counter = concurrentCounterCache.getUnchecked(addr); - Call call = new Call(nextCallId(), md, param, hrc.cellScanner(), returnType, - hrc.getCallTimeout(), hrc.getPriority(), new RpcCallback() { -@@ -524,13 +521,6 @@ public abstract class AbstractRpcClient implements RpcC - - protected final Address addr; - -- // We cache the resolved InetSocketAddress for the channel so we do not do a DNS lookup -- // per method call on the channel. If the remote target is removed or reprovisioned and -- // its identity changes a new channel with a newly resolved InetSocketAddress will be -- // created as part of retry, so caching here is fine. -- // Normally, caching an InetSocketAddress is an anti-pattern. 
-- protected InetSocketAddress isa; -- - protected final AbstractRpcClient rpcClient; - - protected final User ticket; -@@ -580,23 +570,9 @@ public abstract class AbstractRpcClient implements RpcC - - @Override - public Message callBlockingMethod(Descriptors.MethodDescriptor md, RpcController controller, -- Message param, Message returnType) throws ServiceException { -- // Look up remote address upon first call -- if (isa == null) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- isa = Address.toSocketAddress(addr); -- if (isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- isa = null; -- throw new ServiceException(new UnknownHostException(addr + " could not be resolved")); -- } -- } -- return rpcClient.callBlockingMethod(md, configureRpcController(controller), -- param, returnType, ticket, isa); -+ Message param, Message returnType) throws ServiceException { -+ return rpcClient.callBlockingMethod(md, configureRpcController(controller), param, returnType, -+ ticket, addr); - } - } - -@@ -611,29 +587,13 @@ public abstract class AbstractRpcClient implements RpcC - } - - @Override -- public void callMethod(Descriptors.MethodDescriptor md, RpcController controller, -- Message param, Message returnType, RpcCallback done) { -- HBaseRpcController configuredController = -- configureRpcController(Preconditions.checkNotNull(controller, -- "RpcController can not be null for async rpc call")); -- // Look up remote address upon first call -- if (isa == null || isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- isa = Address.toSocketAddress(addr); -- if (isa.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- isa = null; -- controller.setFailed(addr + " could not be resolved"); -- return; -- } -- } -+ public void callMethod(Descriptors.MethodDescriptor md, RpcController controller, Message param, -+ Message returnType, RpcCallback done) { -+ HBaseRpcController configuredController = configureRpcController( -+ Preconditions.checkNotNull(controller, "RpcController can not be null for async rpc call")); - // This method does not throw any exceptions, so the caller must provide a - // HBaseRpcController which is used to pass the exceptions. 
-- this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, isa, done); -+ this.rpcClient.callMethod(md, configuredController, param, returnType, ticket, addr, done); - } - } - } -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -index c623004f13..9b0605d37d 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java -@@ -35,7 +35,6 @@ import java.io.OutputStream; - import java.net.InetSocketAddress; - import java.net.Socket; - import java.net.SocketTimeoutException; --import java.net.UnknownHostException; - import java.security.PrivilegedExceptionAction; - import java.util.ArrayDeque; - import java.util.Locale; -@@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; - import org.apache.hadoop.hbase.io.ByteArrayOutputStream; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; - import org.apache.hadoop.hbase.log.HBaseMarkers; --import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; - import org.apache.hadoop.hbase.security.SaslUtil; - import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; -@@ -257,16 +255,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (this.rpcClient.localAddr != null) { - this.socket.bind(this.rpcClient.localAddr); - } -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (remoteAddr.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } -+ InetSocketAddress remoteAddr = getRemoteInetAddress(rpcClient.metrics); - NetUtils.connect(this.socket, remoteAddr, this.rpcClient.connectTO); - this.socket.setSoTimeout(this.rpcClient.readTO); - return; -@@ -374,15 +363,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { - if (this.metrics != null) { - this.metrics.incrNsLookups(); - } -- InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (serverAddr.isUnresolved()) { -- if (this.metrics != null) { -- this.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } - saslRpcClient = new HBaseSaslRpcClient(this.rpcClient.conf, provider, token, -- serverAddr.getAddress(), securityInfo, this.rpcClient.fallbackAllowed, -+ socket.getInetAddress(), securityInfo, this.rpcClient.fallbackAllowed, - this.rpcClient.conf.get("hbase.rpc.protection", - QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), - this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT)); -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -index 6a92150357..129773fce7 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java -@@ -32,7 +32,6 @@ import java.util.concurrent.ThreadLocalRandom; - import 
java.util.concurrent.TimeUnit; - import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent; - import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; --import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.NettyHBaseRpcConnectionHeaderHandler; - import org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler; - import org.apache.hadoop.hbase.security.SaslChallengeDecoder; -@@ -210,18 +209,9 @@ class NettyRpcConnection extends RpcConnection { - Promise saslPromise = ch.eventLoop().newPromise(); - final NettyHBaseSaslRpcClientHandler saslHandler; - try { -- if (this.metrics != null) { -- this.metrics.incrNsLookups(); -- } -- InetSocketAddress serverAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (serverAddr.isUnresolved()) { -- if (this.metrics != null) { -- this.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } - saslHandler = new NettyHBaseSaslRpcClientHandler(saslPromise, ticket, provider, token, -- serverAddr.getAddress(), securityInfo, rpcClient.fallbackAllowed, this.rpcClient.conf); -+ ((InetSocketAddress) ch.remoteAddress()).getAddress(), securityInfo, -+ rpcClient.fallbackAllowed, this.rpcClient.conf); - } catch (IOException e) { - failInit(ch, e); - return; -@@ -285,16 +275,7 @@ class NettyRpcConnection extends RpcConnection { - private void connect() throws UnknownHostException { - assert eventLoop.inEventLoop(); - LOG.trace("Connecting to {}", remoteId.getAddress()); -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookups(); -- } -- InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -- if (remoteAddr.isUnresolved()) { -- if (this.rpcClient.metrics != null) { -- this.rpcClient.metrics.incrNsLookupsFailed(); -- } -- throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -- } -+ InetSocketAddress remoteAddr = getRemoteInetAddress(rpcClient.metrics); - this.channel = new Bootstrap().group(eventLoop).channel(rpcClient.channelClass) - .option(ChannelOption.TCP_NODELAY, rpcClient.isTcpNoDelay()) - .option(ChannelOption.SO_KEEPALIVE, rpcClient.tcpKeepAlive) -diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -index a3e2bef30c..912fa4fb06 100644 ---- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java -@@ -18,11 +18,14 @@ - package org.apache.hadoop.hbase.ipc; - - import java.io.IOException; -+import java.net.InetSocketAddress; -+import java.net.UnknownHostException; - import java.util.concurrent.TimeUnit; - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hbase.HConstants; - import org.apache.hadoop.hbase.client.MetricsConnection; - import org.apache.hadoop.hbase.codec.Codec; -+import org.apache.hadoop.hbase.net.Address; - import org.apache.hadoop.hbase.security.SecurityInfo; - import org.apache.hadoop.hbase.security.User; - import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider; -@@ -122,7 +125,7 @@ abstract class RpcConnection { - this.remoteId = remoteId; - } - -- protected void scheduleTimeoutTask(final Call call) { -+ protected final void scheduleTimeoutTask(final Call call) { - if (call.timeout > 0) { - call.timeoutTask = timeoutTimer.newTimeout(new TimerTask() { - -@@ -137,7 
+140,7 @@ abstract class RpcConnection { - } - } - -- protected byte[] getConnectionHeaderPreamble() { -+ protected final byte[] getConnectionHeaderPreamble() { - // Assemble the preamble up in a buffer first and then send it. Writing individual elements, - // they are getting sent across piecemeal according to wireshark and then server is messing - // up the reading on occasion (the passed in stream is not buffered yet). -@@ -153,7 +156,7 @@ abstract class RpcConnection { - return preamble; - } - -- protected ConnectionHeader getConnectionHeader() { -+ protected final ConnectionHeader getConnectionHeader() { - final ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); - builder.setServiceName(remoteId.getServiceName()); - final UserInformation userInfoPB = provider.getUserInfo(remoteId.ticket); -@@ -176,6 +179,21 @@ abstract class RpcConnection { - return builder.build(); - } - -+ protected final InetSocketAddress getRemoteInetAddress(MetricsConnection metrics) -+ throws UnknownHostException { -+ if (metrics != null) { -+ metrics.incrNsLookups(); -+ } -+ InetSocketAddress remoteAddr = Address.toSocketAddress(remoteId.getAddress()); -+ if (remoteAddr.isUnresolved()) { -+ if (metrics != null) { -+ metrics.incrNsLookupsFailed(); -+ } -+ throw new UnknownHostException(remoteId.getAddress() + " could not be resolved"); -+ } -+ return remoteAddr; -+ } -+ - protected abstract void callTimeout(Call call); - - public ConnectionId remoteId() { --- -2.40.1 - diff --git a/hbase/stackable/patches/2.4.17/004-HBASE-27103-2.4.17.patch b/hbase/stackable/patches/2.4.17/004-HBASE-27103-2.4.17.patch deleted file mode 100644 index 7e1f7a1fe..000000000 --- a/hbase/stackable/patches/2.4.17/004-HBASE-27103-2.4.17.patch +++ /dev/null @@ -1,31 +0,0 @@ -Subject: [PATCH] HBASE-27103 & HBASE-27065 Fix license validation warning for leveldbjni-all ---- -Index: hbase-resource-bundle/src/main/resources/supplemental-models.xml -IDEA additional info: -Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP -<+>UTF-8 -=================================================================== -diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml ---- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml (revision 35781955c09db984f87c239c7a0dba900a16eb60) -+++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml (date 1702637740114) -@@ -2316,6 +2316,20 @@ - - - -+ -+ -+ org.openlabtesting.leveldbjni -+ leveldbjni-all -+ -+ -+ -+ BSD 3-Clause License -+ http://www.opensource.org/licenses/BSD-3-Clause -+ repo -+ -+ -+ -+ - - - diff --git a/hbase/stackable/patches/2.4.17/005-HBASE-28242-2.4.17.patch b/hbase/stackable/patches/2.4.17/005-HBASE-28242-2.4.17.patch deleted file mode 100644 index b0230cb78..000000000 --- a/hbase/stackable/patches/2.4.17/005-HBASE-28242-2.4.17.patch +++ /dev/null @@ -1,376 +0,0 @@ -diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java -index 4e30484384..cc67974759 100644 ---- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java -+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/ProfileServlet.java -@@ -25,9 +25,11 @@ import java.util.concurrent.TimeUnit; - import java.util.concurrent.atomic.AtomicInteger; - import java.util.concurrent.locks.Lock; - import java.util.concurrent.locks.ReentrantLock; -+ - import 
javax.servlet.http.HttpServlet; - import javax.servlet.http.HttpServletRequest; - import javax.servlet.http.HttpServletResponse; -+ - import org.apache.hadoop.hbase.util.ProcessUtils; - import org.apache.yetus.audience.InterfaceAudience; - import org.slf4j.Logger; -@@ -37,23 +39,60 @@ import org.apache.hbase.thirdparty.com.google.common.base.Joiner; - - /** - * Servlet that runs async-profiler as web-endpoint. Following options from async-profiler can be -- * specified as query paramater. // -e event profiling event: cpu|alloc|lock|cache-misses etc. // -d -- * duration run profiling for 'duration' seconds (integer) // -i interval sampling interval in -- * nanoseconds (long) // -j jstackdepth maximum Java stack depth (integer) // -b bufsize frame -- * buffer size (long) // -t profile different threads separately // -s simple class names instead of -- * FQN // -o fmt[,fmt...] output format: summary|traces|flat|collapsed|svg|tree|jfr|html // --width -- * px SVG width pixels (integer) // --height px SVG frame height pixels (integer) // --minwidth px -- * skip frames smaller than px (double) // --reverse generate stack-reversed FlameGraph / Call tree -- * Example: - To collect 30 second CPU profile of current process (returns FlameGraph svg) curl -- * "http://localhost:10002/prof" - To collect 1 minute CPU profile of current process and output in -- * tree format (html) curl "http://localhost:10002/prof?output=tree&duration=60" - To collect 30 -- * second heap allocation profile of current process (returns FlameGraph svg) curl -- * "http://localhost:10002/prof?event=alloc" - To collect lock contention profile of current process -- * (returns FlameGraph svg) curl "http://localhost:10002/prof?event=lock" Following event types are -- * supported (default is 'cpu') (NOTE: not all OS'es support all events) // Perf events: // cpu // -- * page-faults // context-switches // cycles // instructions // cache-references // cache-misses // -- * branches // branch-misses // bus-cycles // L1-dcache-load-misses // LLC-load-misses // -- * dTLB-load-misses // mem:breakpoint // trace:tracepoint // Java events: // alloc // lock -+ * specified as query parameter.
-+ * <ul>
-+ * <li>-e event profiling event: cpu|alloc|lock|cache-misses etc.</li>
-+ * <li>-d duration run profiling for 'duration' seconds (integer), default 10s</li>
-+ * <li>-i interval sampling interval in nanoseconds (long), default 10ms</li>
-+ * <li>-j jstackdepth maximum Java stack depth (integer), default 2048</li>
-+ * <li>-t profile different threads separately</li>
-+ * <li>-s simple class names instead of FQN</li>
-+ * <li>-g print method signatures</li>
-+ * <li>-a annotate Java methods</li>
-+ * <li>-l prepend library names</li>
-+ * <li>-o fmt output format: flat|traces|collapsed|flamegraph|tree|jfr</li>
-+ * <li>--minwidth pct skip frames smaller than pct% (double)</li>
-+ * <li>--reverse generate stack-reversed FlameGraph / Call tree</li>
-+ * </ul>
-+ * Example:
-+ * <ul>
-+ * <li>To collect 30 second CPU profile of current process (returns FlameGraph svg):
-+ * {@code curl "http://localhost:10002/prof"}</li>
-+ * <li>To collect 1 minute CPU profile of current process and output in tree format (html)
-+ * {@code curl "http://localhost:10002/prof?output=tree&duration=60"}</li>
-+ * <li>To collect 30 second heap allocation profile of current process (returns FlameGraph):
-+ * {@code curl "http://localhost:10002/prof?event=alloc"}</li>
-+ * <li>To collect lock contention profile of current process (returns FlameGraph):
-+ * {@code curl "http://localhost:10002/prof?event=lock"}</li>
-+ * </ul>
-+ * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all
-+ * events).<br>
-+ * Basic events:
-+ * <ul>
-+ * <li>cpu</li>
-+ * <li>alloc</li>
-+ * <li>lock</li>
-+ * <li>wall</li>
-+ * <li>itimer</li>
-+ * </ul>
-+ * Perf events:
-+ * <ul>
-+ * <li>L1-dcache-load-misses</li>
-+ * <li>LLC-load-misses</li>
-+ * <li>branch-instructions</li>
-+ * <li>branch-misses</li>
-+ * <li>bus-cycles</li>
-+ * <li>cache-misses</li>
-+ * <li>cache-references</li>
-+ * <li>context-switches</li>
-+ * <li>cpu</li>
-+ * <li>cycles</li>
-+ * <li>dTLB-load-misses</li>
-+ * <li>instructions</li>
-+ * <li>mem:breakpoint</li>
-+ * <li>page-faults</li>
-+ * <li>trace:tracepoint</li>
-+ * </ul>
- */ - @InterfaceAudience.Private - public class ProfileServlet extends HttpServlet { -@@ -76,19 +115,21 @@ public class ProfileServlet extends HttpServlet { - CPU("cpu"), - ALLOC("alloc"), - LOCK("lock"), -- PAGE_FAULTS("page-faults"), -+ WALL("wall"), -+ ITIMER("itimer"), -+ BRANCH_INSTRUCTIONS("branch-instructions"), -+ BRANCH_MISSES("branch-misses"), -+ BUS_CYCLES("bus-cycles"), -+ CACHE_MISSES("cache-misses"), -+ CACHE_REFERENCES("cache-references"), - CONTEXT_SWITCHES("context-switches"), - CYCLES("cycles"), -+ DTLB_LOAD_MISSES("dTLB-load-misses"), - INSTRUCTIONS("instructions"), -- CACHE_REFERENCES("cache-references"), -- CACHE_MISSES("cache-misses"), -- BRANCHES("branches"), -- BRANCH_MISSES("branch-misses"), -- BUS_CYCLES("bus-cycles"), - L1_DCACHE_LOAD_MISSES("L1-dcache-load-misses"), - LLC_LOAD_MISSES("LLC-load-misses"), -- DTLB_LOAD_MISSES("dTLB-load-misses"), - MEM_BREAKPOINT("mem:breakpoint"), -+ PAGE_FAULTS("page-faults"), - TRACE_TRACEPOINT("trace:tracepoint"),; - - private final String internalName; -@@ -97,11 +138,11 @@ public class ProfileServlet extends HttpServlet { - this.internalName = internalName; - } - -- public String getInternalName() { -+ String getInternalName() { - return internalName; - } - -- public static Event fromInternalName(final String name) { -+ static Event fromInternalName(final String name) { - for (Event event : values()) { - if (event.getInternalName().equalsIgnoreCase(name)) { - return event; -@@ -112,30 +153,26 @@ public class ProfileServlet extends HttpServlet { - } - } - -- enum Output { -- SUMMARY, -- TRACES, -- FLAT, -+ private enum Output { - COLLAPSED, -- // No SVG in 2.x asyncprofiler. -- SVG, -- TREE, -+ FLAMEGRAPH, -+ FLAT, - JFR, -- // In 2.x asyncprofiler, this is how you get flamegraphs. -- HTML -+ TRACES, -+ TREE - } - - @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SE_TRANSIENT_FIELD_NOT_RESTORED", - justification = "This class is never serialized nor restored.") -- private transient Lock profilerLock = new ReentrantLock(); -+ private final transient Lock profilerLock = new ReentrantLock(); - private transient volatile Process process; -- private String asyncProfilerHome; -+ private final String asyncProfilerHome; - private Integer pid; - - public ProfileServlet() { - this.asyncProfilerHome = getAsyncProfilerHome(); - this.pid = ProcessUtils.getPid(); -- LOG.info("Servlet process PID: " + pid + " asyncProfilerHome: " + asyncProfilerHome); -+ LOG.info("Servlet process PID: {} asyncProfilerHome: {}", pid, asyncProfilerHome); - } - - @Override -@@ -154,9 +191,9 @@ public class ProfileServlet extends HttpServlet { - setResponseHeader(resp); - resp.getWriter() - .write("ASYNC_PROFILER_HOME env is not set.\n\n" -- + "Please ensure the prerequsites for the Profiler Servlet have been installed and the\n" -+ + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" - + "environment is properly configured.
For more information please see\n" -- + "http://hbase.apache.org/book.html#profiler\n"); -+ + "https://hbase.apache.org/book.html#profiler\n"); - return; - } - -@@ -172,18 +209,18 @@ public class ProfileServlet extends HttpServlet { - return; - } - -- final int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); -- final Output output = getOutput(req); -- final Event event = getEvent(req); -- final Long interval = getLong(req, "interval"); -- final Integer jstackDepth = getInteger(req, "jstackdepth", null); -- final Long bufsize = getLong(req, "bufsize"); -- final boolean thread = req.getParameterMap().containsKey("thread"); -- final boolean simple = req.getParameterMap().containsKey("simple"); -- final Integer width = getInteger(req, "width", null); -- final Integer height = getInteger(req, "height", null); -- final Double minwidth = getMinWidth(req); -- final boolean reverse = req.getParameterMap().containsKey("reverse"); -+ Event event = getEvent(req); -+ int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); -+ Long interval = getLong(req, "interval"); -+ Integer jstackDepth = getInteger(req, "jstackdepth", null); -+ boolean thread = req.getParameterMap().containsKey("thread"); -+ boolean simple = req.getParameterMap().containsKey("simple"); -+ boolean signature = req.getParameterMap().containsKey("signature"); -+ boolean annotate = req.getParameterMap().containsKey("annotate"); -+ boolean prependLib = req.getParameterMap().containsKey("prependlib"); -+ Output output = getOutput(req); -+ Double minwidth = getMinWidth(req); -+ boolean reverse = req.getParameterMap().containsKey("reverse"); - - if (process == null || !process.isAlive()) { - try { -@@ -198,11 +235,7 @@ public class ProfileServlet extends HttpServlet { - cmd.add("-e"); - cmd.add(event.getInternalName()); - cmd.add("-d"); -- cmd.add("" + duration); -- cmd.add("-o"); -- cmd.add(output.name().toLowerCase()); -- cmd.add("-f"); -- cmd.add(outputFile.getAbsolutePath()); -+ cmd.add(String.valueOf(duration)); - if (interval != null) { - cmd.add("-i"); - cmd.add(interval.toString()); -@@ -211,24 +244,25 @@ public class ProfileServlet extends HttpServlet { - cmd.add("-j"); - cmd.add(jstackDepth.toString()); - } -- if (bufsize != null) { -- cmd.add("-b"); -- cmd.add(bufsize.toString()); -- } - if (thread) { - cmd.add("-t"); - } - if (simple) { - cmd.add("-s"); - } -- if (width != null) { -- cmd.add("--width"); -- cmd.add(width.toString()); -+ if (signature) { -+ cmd.add("-g"); - } -- if (height != null) { -- cmd.add("--height"); -- cmd.add(height.toString()); -+ if (annotate) { -+ cmd.add("-a"); - } -+ if (prependLib) { -+ cmd.add("-l"); -+ } -+ cmd.add("-o"); -+ cmd.add(output.name().toLowerCase()); -+ cmd.add("-f"); -+ cmd.add(outputFile.getAbsolutePath()); - if (minwidth != null) { - cmd.add("--minwidth"); - cmd.add(minwidth.toString()); -@@ -236,6 +270,7 @@ public class ProfileServlet extends HttpServlet { - if (reverse) { - cmd.add("--reverse"); - } -+ - cmd.add(pid.toString()); - process = ProcessUtils.runCmdAsync(cmd); - -@@ -246,7 +281,10 @@ public class ProfileServlet extends HttpServlet { - resp.getWriter() - .write("Started [" + event.getInternalName() - + "] profiling. This page will automatically redirect to " + relativeUrl + " after " -- + duration + " seconds.\n\nCommand:\n" + Joiner.on(" ").join(cmd)); -+ + duration + " seconds. " -+ + "If empty diagram and Linux 4.6+, see 'Basic Usage' section on the Async " -+ + "Profiler Home Page, https://github.com/jvm-profiling-tools/async-profiler." 
-+ + "\n\nCommand:\n" + Joiner.on(" ").join(cmd)); - - // to avoid auto-refresh by ProfileOutputServlet, refreshDelay can be specified - // via url param -@@ -264,8 +302,9 @@ public class ProfileServlet extends HttpServlet { - resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - resp.getWriter() - .write("Unable to acquire lock. Another instance of profiler might be running."); -- LOG.warn("Unable to acquire lock in " + lockTimeoutSecs -- + " seconds. Another instance of profiler might be running."); -+ LOG.warn( -+ "Unable to acquire lock in {} seconds. Another instance of profiler might be running.", -+ lockTimeoutSecs); - } - } catch (InterruptedException e) { - LOG.warn("Interrupted while acquiring profile lock.", e); -@@ -278,9 +317,9 @@ public class ProfileServlet extends HttpServlet { - } - } - -- private Integer getInteger(final HttpServletRequest req, final String param, -+ private static Integer getInteger(final HttpServletRequest req, final String param, - final Integer defaultValue) { -- final String value = req.getParameter(param); -+ String value = req.getParameter(param); - if (value != null) { - try { - return Integer.valueOf(value); -@@ -291,8 +330,8 @@ public class ProfileServlet extends HttpServlet { - return defaultValue; - } - -- private Long getLong(final HttpServletRequest req, final String param) { -- final String value = req.getParameter(param); -+ private static Long getLong(final HttpServletRequest req, final String param) { -+ String value = req.getParameter(param); - if (value != null) { - try { - return Long.valueOf(value); -@@ -303,8 +342,8 @@ public class ProfileServlet extends HttpServlet { - return null; - } - -- private Double getMinWidth(final HttpServletRequest req) { -- final String value = req.getParameter("minwidth"); -+ private static Double getMinWidth(final HttpServletRequest req) { -+ String value = req.getParameter("minwidth"); - if (value != null) { - try { - return Double.valueOf(value); -@@ -315,8 +354,8 @@ public class ProfileServlet extends HttpServlet { - return null; - } - -- private Event getEvent(final HttpServletRequest req) { -- final String eventArg = req.getParameter("event"); -+ private static Event getEvent(final HttpServletRequest req) { -+ String eventArg = req.getParameter("event"); - if (eventArg != null) { - Event event = Event.fromInternalName(eventArg); - return event == null ? Event.CPU : event; -@@ -324,16 +363,16 @@ public class ProfileServlet extends HttpServlet { - return Event.CPU; - } - -- private Output getOutput(final HttpServletRequest req) { -- final String outputArg = req.getParameter("output"); -+ private static Output getOutput(final HttpServletRequest req) { -+ String outputArg = req.getParameter("output"); - if (req.getParameter("output") != null) { - try { - return Output.valueOf(outputArg.trim().toUpperCase()); - } catch (IllegalArgumentException e) { -- return Output.SVG; -+ return Output.FLAMEGRAPH; - } - } -- return Output.SVG; -+ return Output.FLAMEGRAPH; - } - - static void setResponseHeader(final HttpServletResponse response) { -@@ -365,8 +404,7 @@ public class ProfileServlet extends HttpServlet { - .write("The profiler servlet was disabled at startup.\n\n" - + "Please ensure the prerequisites for the Profiler Servlet have been installed and the\n" - + "environment is properly configured. 
For more information please see\n" -- + "http://hbase.apache.org/book.html#profiler\n"); -- return; -+ + "https://hbase.apache.org/book.html#profiler\n"); - } - - } diff --git a/hbase/stackable/patches/2.4.17/006-patch-cyclonedx-plugin.patch b/hbase/stackable/patches/2.4.17/006-patch-cyclonedx-plugin.patch deleted file mode 100644 index 86aaafb9f..000000000 --- a/hbase/stackable/patches/2.4.17/006-patch-cyclonedx-plugin.patch +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/pom.xml b/pom.xml -index e7df337..566414e 100755 ---- a/pom.xml -+++ b/pom.xml -@@ -2337,7 +2337,11 @@ - - org.cyclonedx - cyclonedx-maven-plugin -- 2.7.3 -+ 2.8.0 -+ -+ application -+ 1.5 -+ - - - diff --git a/hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch b/hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch new file mode 100644 index 000000000..066a32eb5 --- /dev/null +++ b/hbase/stackable/patches/2.6.0/06-CVE-2024-36114-bump-aircompressor-0-27.patch @@ -0,0 +1,33 @@ +Fix CVE-2024-36114 +see https://github.com/stackabletech/vulnerabilities/issues/834 + +Aircompressor is a library with ports of the Snappy, LZO, LZ4, and +Zstandard compression algorithms to Java. All decompressor +implementations of Aircompressor (LZ4, LZO, Snappy, Zstandard) can crash +the JVM for certain input, and in some cases also leak the content of +other memory of the Java process (which could contain sensitive +information). When decompressing certain data, the decompressors try to +access memory outside the bounds of the given byte arrays or byte +buffers. Because Aircompressor uses the JDK class sun.misc.Unsafe to +speed up memory access, no additional bounds checks are performed and +this has similar security consequences as out-of-bounds access in C or +C++, namely it can lead to non-deterministic behavior or crash the JVM. +Users should update to Aircompressor 0.27 or newer where these issues +have been fixed. When decompressing data from untrusted users, this can +be exploited for a denial-of-service attack by crashing the JVM, or to +leak other sensitive information from the Java process. There are no +known workarounds for this issue. + +diff --git a/pom.xml b/pom.xml +index 918cdaa675..bc7ed28404 100644 +--- a/pom.xml ++++ b/pom.xml +@@ -655,7 +655,7 @@ + 2.27.2 + 3.12.0 + +- 0.24 ++ 0.27 + 1.11.0 + 1.8.0 + 1.1.10.4 diff --git a/hbase/stackable/patches/2.6.0/series b/hbase/stackable/patches/2.6.0/series index e0d842845..f64737007 100644 --- a/hbase/stackable/patches/2.6.0/series +++ b/hbase/stackable/patches/2.6.0/series @@ -4,3 +4,4 @@ 03-patch-updates.patch 04-include-dataformat-xml.patch 05-patch-cyclonedx-plugin.patch +06-CVE-2024-36114-bump-aircompressor-0-27.patch
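
The practical effect of the aircompressor bump can be exercised in isolation. The sketch below is illustrative only and not part of the patch series above: the class name `AircompressorCveSketch` and the bit-flipping corruption loop are invented for the example, while `ZstdCompressor`, `ZstdDecompressor` and `MalformedInputException` are from the public API of `io.airlift:aircompressor`, assumed here at version 0.27 on the classpath.

```java
// Illustrative sketch only -- not part of the patches above.
import io.airlift.compress.MalformedInputException;
import io.airlift.compress.zstd.ZstdCompressor;
import io.airlift.compress.zstd.ZstdDecompressor;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class AircompressorCveSketch {
    public static void main(String[] args) {
        byte[] data = "payload that will be compressed and then corrupted"
            .getBytes(StandardCharsets.UTF_8);

        // Produce a valid zstd frame first.
        ZstdCompressor compressor = new ZstdCompressor();
        byte[] compressed = new byte[compressor.maxCompressedLength(data.length)];
        int compressedLength =
            compressor.compress(data, 0, data.length, compressed, 0, compressed.length);

        // Flip bits in the second half of the frame, standing in for
        // attacker-controlled or truncated input.
        byte[] corrupted = Arrays.copyOf(compressed, compressedLength);
        for (int i = corrupted.length / 2; i < corrupted.length; i++) {
            corrupted[i] ^= 0x5A;
        }

        // Affected versions (< 0.27) rely on sun.misc.Unsafe without additional
        // bounds checks, so a frame like this could read outside the input/output
        // arrays (JVM crash or memory disclosure, CVE-2024-36114). From 0.27 on,
        // malformed input is expected to be rejected with an exception instead;
        // the exact exception can vary with how the frame is damaged.
        ZstdDecompressor decompressor = new ZstdDecompressor();
        byte[] output = new byte[data.length * 4];
        try {
            decompressor.decompress(corrupted, 0, corrupted.length, output, 0, output.length);
            System.out.println("corruption went undetected (output is garbage)");
        } catch (MalformedInputException e) {
            System.out.println("rejected malformed input: " + e.getMessage());
        }
    }
}
```

Per the CVE description quoted in the patch, the same reasoning applies to the LZ4, LZO and Snappy decompressors, which were fixed in the same 0.27 release.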