From 1482cac75936c521a5138ee3793e32cea6506b2b Mon Sep 17 00:00:00 2001
From: Venu Reddy
Date: Fri, 15 May 2026 22:33:34 +0530
Subject: [PATCH] HIVE-29615: Fix Hive Metastore and NameNode connection
 failures caused by the SASL error "No common protection layer between
 client and server"
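
The Hive Thrift client, the HMS server and the standalone HMS tools client
all built their SASL properties through
MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL). Whenever SSL was
enabled, that helper forced hadoop.rpc.protection to "authentication" by
mutating the shared Configuration with conf.set(). On clusters that enforce
a stronger quality of protection (e.g. hadoop.rpc.protection=privacy), the
mutated Configuration then offered only the "auth" QOP on subsequent RPCs,
so SASL negotiation with the Metastore server or the NameNode failed with
"No common protection layer between client and server".

This patch removes the override: both client and server now take their SASL
properties directly from the unmodified Hadoop configuration via
HadoopThriftAuthBridge.getHadoopSaslProperties(conf), so the two sides
negotiate a common protection layer and the NameNode connection settings
are never touched.

For reference, Hadoop's SaslRpcServer maps hadoop.rpc.protection to the
SASL QOP roughly as follows:

  hadoop.rpc.protection = authentication  ->  javax.security.sasl.qop = auth
  hadoop.rpc.protection = integrity       ->  javax.security.sasl.qop = auth-int
  hadoop.rpc.protection = privacy         ->  javax.security.sasl.qop = auth-conf

A minimal sketch of the shared call (illustrative only, not part of this
patch; the class name is made up):

  import java.util.Map;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;

  public class SaslQopCheck {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.set("hadoop.rpc.protection", "privacy"); // cluster-wide QOP
      // Derives the SASL properties without mutating conf.
      Map<String, String> props =
          HadoopThriftAuthBridge.getBridge().getHadoopSaslProperties(conf);
      System.out.println(props); // expect javax.security.sasl.qop=auth-conf
    }
  }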
Creating KERBEROS-based thrift connection."); String principalConfig = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.KERBEROS_PRINCIPAL); transport = authBridge.createClientTransport( principalConfig, store.getHost(), "KERBEROS", null, - underlyingTransport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL)); + underlyingTransport, saslProperties); } } catch (IOException ioe) { LOG.error("Failed to create client transport", ioe); diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java index 473a11cfa939..3339f1c5a3ca 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java @@ -49,7 +49,6 @@ import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.TableName; @@ -71,8 +70,6 @@ import org.apache.hadoop.hive.metastore.api.WMPoolSchedulingPolicy; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; -import org.apache.hadoop.security.SaslRpcServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -559,30 +556,6 @@ public static int getArchivingLevel(Partition part) throws MetaException { return part.getValues().size(); } - /** - * Read and return the meta store Sasl configuration. Currently it uses the default - * Hadoop SASL configuration and can be configured using "hadoop.rpc.protection" - * HADOOP-10211, made a backward incompatible change due to which this call doesn't - * work with Hadoop 2.4.0 and later. - * @param conf - * @return The SASL configuration - */ - public static Map getMetaStoreSaslProperties(Configuration conf, boolean useSSL) { - // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration - - // If SSL is enabled, override the given value of "hadoop.rpc.protection" and set it to "authentication" - // This disables any encryption provided by SASL, since SSL already provides it - String hadoopRpcProtectionVal = conf.get(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION); - String hadoopRpcProtectionAuth = SaslRpcServer.QualityOfProtection.AUTHENTICATION.toString(); - - if (useSSL && hadoopRpcProtectionVal != null && !hadoopRpcProtectionVal.equals(hadoopRpcProtectionAuth)) { - LOG.warn("Overriding value of " + CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION + " setting it from " - + hadoopRpcProtectionVal + " to " + hadoopRpcProtectionAuth + " because SSL is enabled"); - conf.set(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, hadoopRpcProtectionAuth); - } - return HadoopThriftAuthBridge.getBridge().getHadoopSaslProperties(conf); - } - /** * Returns currently known class paths as best effort. 
    * For system class loader, this may return empty.
    * In such cases we will anyway create new child class loader in {@link #addToClassPath(ClassLoader cloader, String[] newPaths)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/AuthFactory.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/AuthFactory.java
index 816337b25da5..4ab16b73bf2e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/AuthFactory.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/AuthFactory.java
@@ -29,7 +29,6 @@
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
 import org.apache.hadoop.hive.metastore.security.TUGIContainingTransport;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
 import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
 import org.apache.thrift.transport.layered.TFramedTransport;
@@ -118,7 +117,7 @@ public AuthFactory(HadoopThriftAuthBridge bridge, Configuration conf, Object bas
     }
   }

-  TTransportFactory getAuthTransFactory(boolean useSSL, Configuration conf) throws LoginException {
+  TTransportFactory getAuthTransFactory(HadoopThriftAuthBridge bridge, Configuration conf) throws LoginException {
     TTransportFactory transportFactory;
     TSaslServerTransport.Factory serverTransportFactory;

@@ -128,7 +127,7 @@ TTransportFactory getAuthTransFactory(boolean useSSL, Configuration conf) throws
         throw new LoginException("Framed transport is not supported with SASL enabled.");
       }
       serverTransportFactory = saslServer.createSaslServerTransportFactory(
-          MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+          bridge.getHadoopSaslProperties(conf));
       transportFactory = new ChainedTTransportFactory(
           saslServer.wrapTransportFactoryInClientUGI(serverTransportFactory), new TUGIContainingTransport.Factory());
     } catch (TTransportException e) {
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 17aaeeec52c5..b7a0004c76bd 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -504,7 +504,7 @@ private static ThriftServer startBinaryMetastore(int port, HadoopThriftAuthBridg
     }

     TProcessor processor;
-    TTransportFactory transFactory = authFactory.getAuthTransFactory(useSSL, conf);
+    TTransportFactory transFactory = authFactory.getAuthTransFactory(bridge, conf);
     final TProtocolFactory protocolFactory;
     final TProtocolFactory inputProtoFactory;
     if (useCompactProtocol) {
diff --git a/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java b/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java
index 173a5f6dcdb0..cce061600c93 100644
--- a/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java
+++ b/standalone-metastore/metastore-tools/tools-common/src/main/java/org/apache/hadoop/hive/metastore/tools/HMSClient.java
@@ -46,7 +46,6 @@
 import org.apache.hadoop.hive.metastore.api.TxnType;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TConfiguration;
@@ -467,9 +466,9 @@ private TTransport open(Configuration conf, @NotNull URI uri) throws

     if (useSasl) {
       // Wrap thrift connection with SASL for secure connection.
-      HadoopThriftAuthBridge.Client authBridge =
-          HadoopThriftAuthBridge.getBridge().createClient();
-
+      HadoopThriftAuthBridge bridge = HadoopThriftAuthBridge.getBridge();
+      Map<String, String> saslProperties = bridge.getHadoopSaslProperties(conf);
+      HadoopThriftAuthBridge.Client authBridge = bridge.createClient();
       // check if we should use delegation tokens to authenticate
       // the call below gets hold of the tokens if they are set up by hadoop
       // this should happen on the map/reduce tasks if the client added the
@@ -483,15 +482,14 @@ private TTransport open(Configuration conf, @NotNull URI uri) throws
         LOG.info("HMSC::open(): Found delegation token. Creating DIGEST-based thrift connection.");
         // authenticate using delegation tokens via the "DIGEST" mechanism
         transport = authBridge.createClientTransport(null, host,
-            "DIGEST", tokenStrForm, transport,
-            MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+            "DIGEST", tokenStrForm, transport, saslProperties);
       } else {
         LOG.info("HMSC::open(): Could not find delegation token. Creating KERBEROS-based thrift connection.");
         String principalConfig = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.KERBEROS_PRINCIPAL);
         transport = authBridge.createClientTransport(
             principalConfig, host, "KERBEROS", null,
-            transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+            transport, saslProperties);
       }
     } else {
       if (useFramedTransport) {