Skip to content

Commit

Permalink
[fix](multi-catalog) fix getting UGI methods and unify them (apache#30844)
Browse files Browse the repository at this point in the history

Put all UGI login methods into HadoopUGI
  • Loading branch information
wsjz authored Feb 19, 2024
1 parent b027c72 commit 6e4a2f5
Show file tree
Hide file tree
Showing 14 changed files with 305 additions and 163 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
import org.apache.doris.common.jni.JniScanner;
import org.apache.doris.common.jni.vec.ColumnType;
import org.apache.doris.common.jni.vec.ScanPredicate;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.avro.generic.GenericDatumReader;
Expand Down Expand Up @@ -138,7 +140,7 @@ public HudiJniScanner(int fetchSize, Map<String, String> params) {
predicates = new ScanPredicate[0];
}
}
ugi = Utils.getUserGroupInformation(split.hadoopConf());
ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(split.hadoopConf()));
} catch (Exception e) {
LOG.error("Failed to initialize hudi scanner, split params:\n" + debugString, e);
throw e;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@

package org.apache.doris.hudi;

import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
Expand All @@ -36,35 +39,6 @@
import java.util.List;

public class Utils {
public static class Constants {
public static String HADOOP_USER_NAME = "hadoop.username";
public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
}

public static UserGroupInformation getUserGroupInformation(Configuration conf) {
String authentication = conf.get(Constants.HADOOP_SECURITY_AUTHENTICATION, null);
if ("kerberos".equals(authentication)) {
conf.set("hadoop.security.authorization", "true");
UserGroupInformation.setConfiguration(conf);
String principal = conf.get(Constants.HADOOP_KERBEROS_PRINCIPAL);
String keytab = conf.get(Constants.HADOOP_KERBEROS_KEYTAB);
try {
UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
UserGroupInformation.setLoginUser(ugi);
return ugi;
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
String hadoopUserName = conf.get(Constants.HADOOP_USER_NAME);
if (hadoopUserName != null) {
return UserGroupInformation.createRemoteUser(hadoopUserName);
}
}
return null;
}

public static long getCurrentProcId() {
try {
Expand Down Expand Up @@ -114,7 +88,7 @@ public static void killProcess(long pid) {
}

public static HoodieTableMetaClient getMetaClient(Configuration conf, String basePath) {
UserGroupInformation ugi = getUserGroupInformation(conf);
UserGroupInformation ugi = HadoopUGI.loginWithUGI(AuthenticationConfig.getKerberosConfig(conf));
HoodieTableMetaClient metaClient;
if (ugi != null) {
try {
Expand Down
15 changes: 15 additions & 0 deletions fe/fe-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,21 @@ under the License.
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</exclusion>
</exclusions>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<finalName>doris-fe-common</finalName>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.catalog;
package org.apache.doris.common.security.authentication;

/**
* Define different auth type for external table such as hive/iceberg
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import lombok.Data;
import org.apache.hadoop.conf.Configuration;

@Data
public abstract class AuthenticationConfig {

    // Hadoop/Hive configuration keys used to build an authentication config.
    // Declared final: these are constant key names and must never be reassigned.
    public static final String HADOOP_USER_NAME = "hadoop.username";
    public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
    public static final String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
    public static final String HADOOP_KERBEROS_AUTHORIZATION = "hadoop.security.authorization";
    public static final String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
    public static final String HIVE_KERBEROS_PRINCIPAL = "hive.metastore.kerberos.principal";
    public static final String HIVE_KERBEROS_KEYTAB = "hive.metastore.kerberos.keytab.file";

    private boolean isValid;

    /**
     * Builds an authentication config from a Hadoop conf using the default
     * HDFS kerberos principal/keytab keys.
     *
     * @param conf hadoop configuration
     * @return a {@link KerberosAuthenticationConfig} when kerberos authentication
     *         is enabled in {@code conf}, otherwise a {@link SimpleAuthenticationConfig}
     */
    public static AuthenticationConfig getKerberosConfig(Configuration conf) {
        return AuthenticationConfig.getKerberosConfig(conf, HADOOP_KERBEROS_PRINCIPAL, HADOOP_KERBEROS_KEYTAB);
    }

    /**
     * Builds an authentication config from a Hadoop conf, reading the kerberos
     * principal and keytab from caller-supplied configuration keys.
     *
     * @param conf hadoop configuration
     * @param krbPrincipalKey conf key that holds the kerberos principal
     * @param krbKeytabKey conf key that holds the kerberos keytab path
     * @return a {@link KerberosAuthenticationConfig} when
     *         "hadoop.security.authentication" equals "kerberos",
     *         otherwise a {@link SimpleAuthenticationConfig}
     */
    public static AuthenticationConfig getKerberosConfig(Configuration conf,
                                                         String krbPrincipalKey,
                                                         String krbKeytabKey) {
        String authentication = conf.get(HADOOP_SECURITY_AUTHENTICATION, null);
        if (AuthType.KERBEROS.getDesc().equals(authentication)) {
            KerberosAuthenticationConfig krbConfig = new KerberosAuthenticationConfig();
            krbConfig.setKerberosPrincipal(conf.get(krbPrincipalKey));
            krbConfig.setKerberosKeytab(conf.get(krbKeytabKey));
            krbConfig.setConf(conf);
            return krbConfig;
        } else {
            // Any non-kerberos value (including unset) falls back to AuthType.SIMPLE.
            SimpleAuthenticationConfig simpleAuthenticationConfig = new SimpleAuthenticationConfig();
            simpleAuthenticationConfig.setUsername(conf.get(HADOOP_USER_NAME));
            return simpleAuthenticationConfig;
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.IOException;

public class HadoopUGI {
    private static final Logger LOG = LogManager.getLogger(HadoopUGI.class);

    // Utility class with only static members: prevent instantiation.
    private HadoopUGI() {
    }

    /**
     * Logs in to Hadoop and returns the resulting UGI.
     *
     * <p>For kerberos configs, the current login user is reused (with
     * {@code checkTGTAndReloginFromKeytab}) when it already holds kerberos
     * credentials for the same principal; otherwise a fresh keytab login is
     * performed. For simple configs, a remote user is created from the
     * configured username (default "hadoop").
     *
     * @param config auth config (kerberos or simple)
     * @return the logged-in {@link UserGroupInformation}
     * @throws RuntimeException if the keytab login fails
     */
    public static UserGroupInformation loginWithUGI(AuthenticationConfig config) {
        UserGroupInformation ugi;
        if (config instanceof KerberosAuthenticationConfig) {
            KerberosAuthenticationConfig krbConfig = (KerberosAuthenticationConfig) config;
            Configuration hadoopConf = krbConfig.getConf();
            hadoopConf.set(AuthenticationConfig.HADOOP_KERBEROS_AUTHORIZATION, "true");
            UserGroupInformation.setConfiguration(hadoopConf);
            String principal = krbConfig.getKerberosPrincipal();
            try {
                // login hadoop with keytab and try checking TGT
                ugi = UserGroupInformation.getLoginUser();
                LOG.debug("Current login user: {}", ugi.getUserName());
                if (ugi.hasKerberosCredentials() && StringUtils.equals(ugi.getUserName(), principal)) {
                    // if the current user is logged by kerberos and is the same user
                    // just use checkTGTAndReloginFromKeytab because this method will only relogin
                    // when the TGT is expired or is close to expiry
                    ugi.checkTGTAndReloginFromKeytab();
                    return ugi;
                }
            } catch (IOException e) {
                LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e);
            }
            try {
                ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, krbConfig.getKerberosKeytab());
                UserGroupInformation.setLoginUser(ugi);
                LOG.debug("Login by kerberos authentication with principal: {}", principal);
                return ugi;
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        } else {
            String hadoopUserName = ((SimpleAuthenticationConfig) config).getUsername();
            if (hadoopUserName == null) {
                // no username configured: fall back to the conventional default
                hadoopUserName = "hadoop";
                LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop");
            }
            ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
            UserGroupInformation.setLoginUser(ugi);
            LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName);
            return ugi;
        }
    }

    /**
     * Performs a kerberos keytab login for HMSExternalCatalog; no-op for
     * non-kerberos configs.
     *
     * @param catalogName catalog name, used only in the error message
     * @param config auth config
     * @throws RuntimeException if the kerberos login fails
     */
    public static void tryKrbLogin(String catalogName, AuthenticationConfig config) {
        if (config instanceof KerberosAuthenticationConfig) {
            KerberosAuthenticationConfig krbConfig = (KerberosAuthenticationConfig) config;
            try {
                /**
                 * Because metastore client is created by using
                 * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy}
                 * it will relogin when TGT is expired, so we don't need to relogin manually.
                 */
                UserGroupInformation.loginUserFromKeytab(krbConfig.getKerberosPrincipal(),
                        krbConfig.getKerberosKeytab());
            } catch (IOException e) {
                throw new RuntimeException("login with kerberos auth failed for catalog: " + catalogName, e);
            }
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;

@Data
public class KerberosAuthenticationConfig extends AuthenticationConfig {
    private String kerberosPrincipal;
    private String kerberosKeytab;
    private Configuration conf;

    /**
     * A kerberos config is usable only when both the principal and the
     * keytab path are present.
     */
    @Override
    public boolean isValid() {
        return StringUtils.isNoneEmpty(kerberosPrincipal, kerberosKeytab);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import lombok.Data;
import org.apache.commons.lang3.StringUtils;

@Data
public class SimpleAuthenticationConfig extends AuthenticationConfig {
private String username;

@Override
public boolean isValid() {
return StringUtils.isNotEmpty(username);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

import org.apache.doris.common.DdlException;
import org.apache.doris.common.proc.BaseProcResult;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.thrift.THdfsConf;
import org.apache.doris.thrift.THdfsParams;

Expand All @@ -44,12 +45,6 @@
public class HdfsResource extends Resource {
public static final String HADOOP_FS_PREFIX = "dfs.";
public static String HADOOP_FS_NAME = "fs.defaultFS";
// simple or kerberos
public static String HADOOP_USER_NAME = "hadoop.username";
public static String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
public static String HADOOP_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
public static String HADOOP_KERBEROS_AUTHORIZATION = "hadoop.security.authorization";
public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
public static String HADOOP_SHORT_CIRCUIT = "dfs.client.read.shortcircuit";
public static String HADOOP_SOCKET_PATH = "dfs.domain.socket.path";
public static String DSF_NAMESERVICES = "dfs.nameservices";
Expand Down Expand Up @@ -107,11 +102,11 @@ public static THdfsParams generateHdfsParam(Map<String, String> properties) {
for (Map.Entry<String, String> property : properties.entrySet()) {
if (property.getKey().equalsIgnoreCase(HADOOP_FS_NAME)) {
tHdfsParams.setFsName(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_USER_NAME)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_USER_NAME)) {
tHdfsParams.setUser(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_PRINCIPAL)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL)) {
tHdfsParams.setHdfsKerberosPrincipal(property.getValue());
} else if (property.getKey().equalsIgnoreCase(HADOOP_KERBEROS_KEYTAB)) {
} else if (property.getKey().equalsIgnoreCase(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB)) {
tHdfsParams.setHdfsKerberosKeytab(property.getValue());
} else {
THdfsConf hdfsConf = new THdfsConf();
Expand Down
Loading

0 comments on commit 6e4a2f5

Please sign in to comment.