@@ -835,7 +835,6 @@ private TTransport createAuthBinaryTransport(URI store, TTransport underlyingTransport)
     TTransport transport = underlyingTransport;
     boolean useFramedTransport =
         MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_FRAMED_TRANSPORT);
-    boolean useSSL = MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_SSL);
     boolean useSasl = MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL);
     String clientAuthMode = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_AUTH_MODE);
     boolean usePasswordAuth = false;
@@ -873,9 +872,9 @@ private TTransport createAuthBinaryTransport(URI store, TTransport underlyingTransport)
     } else if (useSasl) {
       // Wrap thrift connection with SASL for secure connection.
       try {
-        HadoopThriftAuthBridge.Client authBridge =
-            HadoopThriftAuthBridge.getBridge().createClient();
-
+        HadoopThriftAuthBridge bridge = HadoopThriftAuthBridge.getBridge();
+        Map<String, String> saslProperties = bridge.getHadoopSaslProperties(conf);
+        HadoopThriftAuthBridge.Client authBridge = bridge.createClient();
         // check if we should use delegation tokens to authenticate
         // the call below gets hold of the tokens if they are set up by hadoop
         // this should happen on the map/reduce tasks if the client added the
@@ -889,15 +888,14 @@ private TTransport createAuthBinaryTransport(URI store, TTransport underlyingTransport)
           LOG.debug("HMSC::open(): Found delegation token. Creating DIGEST-based thrift connection.");
           // authenticate using delegation tokens via the "DIGEST" mechanism
           transport = authBridge.createClientTransport(null, store.getHost(),
-              "DIGEST", tokenStrForm, underlyingTransport,
-              MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+              "DIGEST", tokenStrForm, underlyingTransport, saslProperties);
         } else {
           LOG.debug("HMSC::open(): Could not find delegation token. Creating KERBEROS-based thrift connection.");
           String principalConfig =
               MetastoreConf.getVar(conf, MetastoreConf.ConfVars.KERBEROS_PRINCIPAL);
           transport = authBridge.createClientTransport(
               principalConfig, store.getHost(), "KERBEROS", null,
-              underlyingTransport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+              underlyingTransport, saslProperties);
         }
       } catch (IOException ioe) {
         LOG.error("Failed to create client transport", ioe);
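The client hunks above (and the mirrored ones in the last file below) replace the removed MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL) helper with a single bridge.getHadoopSaslProperties(conf) lookup per connection. For orientation, here is a minimal sketch of the standard Hadoop mapping from hadoop.rpc.protection values to SASL quality-of-protection levels, which is what a Hadoop-derived properties map like this generally carries; the class and method names are illustrative, not part of this patch.

import java.util.HashMap;
import java.util.Map;
import javax.security.sasl.Sasl;

public class SaslQopSketch {
  // Illustrative mapping from hadoop.rpc.protection values to the SASL
  // quality-of-protection levels that end up in the properties map.
  public static Map<String, String> saslPropsFor(String rpcProtection) {
    Map<String, String> props = new HashMap<>();
    switch (rpcProtection) {
      case "authentication": props.put(Sasl.QOP, "auth"); break;      // authenticate only
      case "integrity":      props.put(Sasl.QOP, "auth-int"); break;  // plus integrity checks
      case "privacy":        props.put(Sasl.QOP, "auth-conf"); break; // plus encryption
      default: throw new IllegalArgumentException("unknown protection: " + rpcProtection);
    }
    props.put(Sasl.SERVER_AUTH, "true"); // require mutual authentication
    return props;
  }
}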
@@ -49,7 +49,6 @@
 import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.TableName;
@@ -71,8 +70,6 @@
 import org.apache.hadoop.hive.metastore.api.WMPoolSchedulingPolicy;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.security.SaslRpcServer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -559,30 +556,6 @@ public static int getArchivingLevel(Partition part) throws MetaException {
     return part.getValues().size();
   }

-  /**
-   * Read and return the meta store Sasl configuration. Currently it uses the default
-   * Hadoop SASL configuration and can be configured using "hadoop.rpc.protection"
-   * HADOOP-10211, made a backward incompatible change due to which this call doesn't
-   * work with Hadoop 2.4.0 and later.
-   * @param conf
-   * @return The SASL configuration
-   */
-  public static Map<String, String> getMetaStoreSaslProperties(Configuration conf, boolean useSSL) {
-    // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration
-
-    // If SSL is enabled, override the given value of "hadoop.rpc.protection" and set it to "authentication"
-    // This disables any encryption provided by SASL, since SSL already provides it
-    String hadoopRpcProtectionVal = conf.get(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION);
-    String hadoopRpcProtectionAuth = SaslRpcServer.QualityOfProtection.AUTHENTICATION.toString();
-
-    if (useSSL && hadoopRpcProtectionVal != null && !hadoopRpcProtectionVal.equals(hadoopRpcProtectionAuth)) {
-      LOG.warn("Overriding value of " + CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION + " setting it from "
-          + hadoopRpcProtectionVal + " to " + hadoopRpcProtectionAuth + " because SSL is enabled");
-      conf.set(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, hadoopRpcProtectionAuth);
-    }
-    return HadoopThriftAuthBridge.getBridge().getHadoopSaslProperties(conf);
-  }
-
   /**
    * Returns currently known class paths as best effort. For system class loader, this may return
    * In such cases we will anyway create new child class loader in {@link #addToClassPath(ClassLoader cloader, String[] newPaths)
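Removing this helper also removes its side effect: it used to force hadoop.rpc.protection down to "authentication" whenever SSL was enabled, so SASL would not re-encrypt a channel that TLS already encrypts. If that behavior is still wanted after this patch, it presumably has to be configured explicitly; a minimal sketch, assuming a deployment that layers SASL over SSL (the helper class here is hypothetical):

import org.apache.hadoop.conf.Configuration;

public class SslSaslConfSketch {
  // Hypothetical helper: pin SASL to plain authentication so it does not
  // re-encrypt traffic on a connection that SSL/TLS already protects.
  public static Configuration sslFriendly(Configuration base) {
    Configuration conf = new Configuration(base);
    conf.set("hadoop.rpc.protection", "authentication");
    return conf;
  }
}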
@@ -29,7 +29,6 @@
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
 import org.apache.hadoop.hive.metastore.security.TUGIContainingTransport;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
 import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
 import org.apache.thrift.transport.layered.TFramedTransport;
@@ -118,7 +117,7 @@
     }
   }

-  TTransportFactory getAuthTransFactory(boolean useSSL, Configuration conf) throws LoginException {
+  TTransportFactory getAuthTransFactory(HadoopThriftAuthBridge bridge, Configuration conf) throws LoginException {

[SonarCloud check failure on line 120 in standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/AuthFactory.java: "Refactor this method to reduce its Cognitive Complexity from 37 to the 15 allowed." See https://sonarcloud.io/project/issues?id=apache_hive&issues=AZ4tUGwuC2QocoKmkKtu&open=AZ4tUGwuC2QocoKmkKtu&pullRequest=6492]

     TTransportFactory transportFactory;
     TSaslServerTransport.Factory serverTransportFactory;

@@ -128,7 +127,7 @@
         throw new LoginException("Framed transport is not supported with SASL enabled.");
       }
       serverTransportFactory = saslServer.createSaslServerTransportFactory(
-          MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+          bridge.getHadoopSaslProperties(conf));
       transportFactory = new ChainedTTransportFactory(
           saslServer.wrapTransportFactoryInClientUGI(serverTransportFactory), new TUGIContainingTransport.Factory());
     } catch (TTransportException e) {
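On the server side the change is symmetrical: getAuthTransFactory now takes the shared bridge and feeds bridge.getHadoopSaslProperties(conf) into the SASL server transport factory. A rough sketch of how a Thrift SASL server factory consumes such a properties map; the mechanism, service name, host, and callback handler are assumptions for illustration, not the patch's actual wiring:

import java.util.Map;
import javax.security.auth.callback.CallbackHandler;
import org.apache.thrift.transport.TSaslServerTransport;

public class SaslServerFactorySketch {
  // Registers a DIGEST-MD5 server definition whose behavior (e.g. QOP)
  // is driven by the Hadoop-derived SASL properties map.
  public static TSaslServerTransport.Factory create(
      Map<String, String> saslProperties, CallbackHandler tokenCallback) {
    TSaslServerTransport.Factory factory = new TSaslServerTransport.Factory();
    factory.addServerDefinition(
        "DIGEST-MD5",      // mechanism used for delegation-token auth
        "hive",            // SASL protocol / service name (assumed)
        "metastore-host",  // server name (assumed)
        saslProperties,    // e.g. {javax.security.sasl.qop=auth}
        tokenCallback);    // verifies the token's shared secret
    return factory;
  }
}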
@@ -504,7 +504,7 @@ private static ThriftServer startBinaryMetastore(int port, HadoopThriftAuthBridge
   }

   TProcessor processor;
-  TTransportFactory transFactory = authFactory.getAuthTransFactory(useSSL, conf);
+  TTransportFactory transFactory = authFactory.getAuthTransFactory(bridge, conf);
   final TProtocolFactory protocolFactory;
   final TProtocolFactory inputProtoFactory;
   if (useCompactProtocol) {
@@ -46,7 +46,6 @@
 import org.apache.hadoop.hive.metastore.api.TxnType;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TConfiguration;
@@ -467,9 +466,9 @@ private TTransport open(Configuration conf, @NotNull URI uri) throws

     if (useSasl) {
       // Wrap thrift connection with SASL for secure connection.
-      HadoopThriftAuthBridge.Client authBridge =
-          HadoopThriftAuthBridge.getBridge().createClient();
-
+      HadoopThriftAuthBridge bridge = HadoopThriftAuthBridge.getBridge();
+      Map<String, String> saslProperties = bridge.getHadoopSaslProperties(conf);
+      HadoopThriftAuthBridge.Client authBridge = bridge.createClient();
       // check if we should use delegation tokens to authenticate
       // the call below gets hold of the tokens if they are set up by hadoop
       // this should happen on the map/reduce tasks if the client added the
@@ -483,15 +482,14 @@ private TTransport open(Configuration conf, @NotNull URI uri) throws
         LOG.info("HMSC::open(): Found delegation token. Creating DIGEST-based thrift connection.");
         // authenticate using delegation tokens via the "DIGEST" mechanism
         transport = authBridge.createClientTransport(null, host,
-            "DIGEST", tokenStrForm, transport,
-            MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+            "DIGEST", tokenStrForm, transport, saslProperties);
       } else {
         LOG.info("HMSC::open(): Could not find delegation token. Creating KERBEROS-based thrift connection.");
         String principalConfig =
             MetastoreConf.getVar(conf, MetastoreConf.ConfVars.KERBEROS_PRINCIPAL);
         transport = authBridge.createClientTransport(
             principalConfig, host, "KERBEROS", null,
-            transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
+            transport, saslProperties);
       }
     } else {
       if (useFramedTransport) {
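For context on the DIGEST-vs-KERBEROS branch above: the "call below gets hold of the tokens" step amounts to scanning the current UGI's credentials for a metastore delegation token. A hedged sketch of that lookup, in which the token kind string and the class itself are assumptions, not code from this patch:

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenLookupSketch {
  // Returns the encoded delegation token from the current user's
  // credentials, or null so the caller falls back to KERBEROS.
  public static String findDelegationToken() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    for (Token<? extends TokenIdentifier> token : ugi.getTokens()) {
      if ("HIVE_DELEGATION_TOKEN".equals(token.getKind().toString())) {
        return token.encodeToUrlString(); // the "tokenStrForm" used above
      }
    }
    return null;
  }
}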