Commit cd560f4

morningman committed Jan 10, 2025
1 parent dcbee58 commit cd560f4
Showing 7 changed files with 35 additions and 17 deletions.
@@ -28,22 +28,28 @@
 import java.util.Map;

 public class CatalogProperties {
+    protected Map<String, String> origProps;

-    protected void normalizedAndCheckProps(Map<String, String> origProps) {
+    protected CatalogProperties(Map<String, String> origProps) {
+        this.origProps = origProps;
+        normalizedAndCheckProps();
+    }
+
+    protected void normalizedAndCheckProps() {
         // 1. prepare phase
-        Map<String, String> resultProps = loadConfigFromFile(getResouceConfigPropName());
+        Map<String, String> allProps = loadConfigFromFile(getResourceConfigPropName());
         // 2. overwrite result properties with original properties
-        resultProps.putAll(origProps);
+        allProps.putAll(origProps);
         // 3. set fields from resultProps
         List<Field> supportedProps = PropertyUtils.getConnectorProperties(this.getClass());
         for (Field field : supportedProps) {
             field.setAccessible(true);
             ConnectorProperty anno = field.getAnnotation(ConnectorProperty.class);
             String[] names = anno.names();
             for (String name : names) {
-                if (origProps.containsKey(name)) {
+                if (allProps.containsKey(name)) {
                     try {
-                        field.set(this, origProps.get(name));
+                        field.set(this, allProps.get(name));
                     } catch (IllegalAccessException e) {
                         throw new RuntimeException("Failed to set property " + name + ", " + e.getMessage(), e);
                     }
@@ -58,7 +64,7 @@ protected void normalizedAndCheckProps(Map<String, String> origProps) {
     // Some properties may be loaded from file
     // Subclass can override this method to load properties from file.
    // The return value is the properties loaded from file, not include original properties
-    private Map<String, String> loadConfigFromFile(String resourceConfig) {
+    protected Map<String, String> loadConfigFromFile(String resourceConfig) {
         if (Strings.isNullOrEmpty(resourceConfig)) {
             return Maps.newHashMap();
         }
@@ -71,7 +77,7 @@ private Map<String, String> loadConfigFromFile(String resourceConfig) {
     }

     // Subclass can override this method to return the property name of resource config.
-    protected String getResouceConfigPropName() {
+    protected String getResourceConfigPropName() {
         return "";
     }

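Note: the binding that the new constructor triggers via normalizedAndCheckProps() can be illustrated with a simplified, self-contained sketch. The annotation, class, and property names below are illustrative stand-ins, not the Doris classes changed in this commit; the sketch only mirrors the merge-then-reflect flow shown above (file-loaded defaults overwritten by user properties, then annotated fields filled by reflection).

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;

public class BindingSketch {
    // Stand-in for the ConnectorProperty annotation used above.
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    @interface Prop {
        String[] names();
    }

    static class DemoProperties {
        @Prop(names = {"demo.uri"})
        String uri = "";

        DemoProperties(Map<String, String> origProps) {
            // Mirrors the new CatalogProperties constructor: store the user
            // properties, then bind annotated fields from the merged map.
            Map<String, String> allProps = new HashMap<>(); // would come from loadConfigFromFile(...)
            allProps.putAll(origProps);                      // user-supplied properties win
            for (Field field : DemoProperties.class.getDeclaredFields()) {
                Prop anno = field.getAnnotation(Prop.class);
                if (anno == null) {
                    continue;
                }
                for (String name : anno.names()) {
                    if (allProps.containsKey(name)) {
                        field.setAccessible(true);
                        try {
                            field.set(this, allProps.get(name));
                        } catch (IllegalAccessException e) {
                            throw new RuntimeException("Failed to set property " + name, e);
                        }
                    }
                }
            }
        }
    }

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("demo.uri", "thrift://example:9083");
        System.out.println(new DemoProperties(props).uri); // prints thrift://example:9083
    }
}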
@@ -122,7 +122,7 @@ private void initHadoopFileIOProps(StorageProperties storeProps) throws UserException {
                     + storeProps.getType());
         }
         HDFSProperties hdfsProps = (HDFSProperties) storeProps;
-
+        hdfsProps.toHadoopConfiguration(hadoopConf);
     }

     private void initS3FileIOProps(StorageProperties storeProps) throws UserException {
@@ -55,7 +55,7 @@ public class AliyunDLFProperties extends MetastoreProperties {

     private static final String DLF_PREFIX = "dlf.";

-    private Map<String, String> otherDlfProps = Maps.newHashMap();
+    private final Map<String, String> otherDlfProps = Maps.newHashMap();

     public AliyunDLFProperties(Map<String, String> origProps) {
         super(Type.DLF, origProps);
@@ -98,7 +98,7 @@ private String getEndpointOrFromRegion(String endpoint, String region, String dl
     }

     @Override
-    protected String getResouceConfigPropName() {
+    protected String getResourceConfigPropName() {
         return "dlf.resouce_config";
     }

@@ -20,11 +20,13 @@
 import org.apache.doris.datasource.property.ConnectorProperty;

 import com.google.common.base.Strings;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.paimon.options.Options;

 import java.util.Map;

+@Slf4j
 public class HMSProperties extends MetastoreProperties {
     @ConnectorProperty(names = {"hive.metastore.uri"},
             description = "The uri of the hive metastore.")
@@ -55,7 +57,7 @@ public HMSProperties(Map<String, String> origProps) {
     }

     @Override
-    protected String getResouceConfigPropName() {
+    protected String getResourceConfigPropName() {
         return "hive.resource_config";
     }

@@ -74,6 +76,8 @@ protected void checkRequiredProperties() {

     public void toPaimonOptionsAndConf(Options options, Configuration conf) {
         options.set("uri", hiveMetastoreUri);
+        Map<String, String> allProps = loadConfigFromFile(getResourceConfigPropName());
+        allProps.forEach(conf::set);
         conf.set("hive.metastore.authentication.type", hiveMetastoreAuthenticationType);
         if ("kerberos".equalsIgnoreCase(hiveMetastoreAuthenticationType)) {
             conf.set("hive.metastore.service.principal", hiveMetastoreServicePrincipal);
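Note: the two lines added to toPaimonOptionsAndConf copy whatever loadConfigFromFile returns into the Hadoop Configuration before the explicit metastore keys are written, so the explicit settings take precedence. A minimal stand-alone sketch of that ordering (illustrative key and values only; assumes hadoop-common on the classpath, not the Doris classes themselves):

import org.apache.hadoop.conf.Configuration;

import java.util.HashMap;
import java.util.Map;

public class OverlaySketch {
    public static void main(String[] args) {
        // Stand-in for the map returned by loadConfigFromFile(...).
        Map<String, String> fromFile = new HashMap<>();
        fromFile.put("hive.metastore.authentication.type", "simple");

        Configuration conf = new Configuration();
        fromFile.forEach(conf::set);                                  // file-provided settings first
        conf.set("hive.metastore.authentication.type", "kerberos");  // explicit setting overrides
        System.out.println(conf.get("hive.metastore.authentication.type")); // prints "kerberos"
    }
}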
@@ -41,7 +41,7 @@ public enum Type {

     protected MetastoreProperties.Type type;

     public MetastoreProperties(Type type, Map<String, String> origProps) {
+        super(origProps);
         this.type = type;
-        normalizedAndCheckProps(origProps);
     }
@@ -20,6 +20,7 @@
 import org.apache.doris.datasource.property.ConnectorProperty;

 import com.google.common.base.Strings;
+import org.apache.hadoop.conf.Configuration;

 import java.util.Map;

@@ -59,13 +60,10 @@ public HDFSProperties(Map<String, String> origProps) {
     }

     @Override
-    protected String getResouceConfigPropName() {
+    protected String getResourceConfigPropName() {
         return "hdfs.resource_config";
     }

-s
     @Override
-
     protected void checkRequiredProperties() {
         super.checkRequiredProperties();
         if ("kerberos".equalsIgnoreCase(hdfsAuthenticationType)) {
@@ -76,4 +74,14 @@ protected void checkRequiredProperties() {
             }
         }
     }
+
+    public void toHadoopConfiguration(Configuration conf) {
+        Map<String, String> allProps = loadConfigFromFile(getResourceConfigPropName());
+        allProps.forEach(conf::set);
+        conf.set("hdfs.authentication.type", hdfsAuthenticationType);
+        if ("kerberos".equalsIgnoreCase(hdfsAuthenticationType)) {
+            conf.set("hdfs.authentication.kerberos.principal", hdfsKerberosPrincipal);
+            conf.set("hdfs.authentication.kerberos.keytab", hdfsKerberosKeytab);
+        }
+    }
 }
@@ -35,7 +35,7 @@ public enum Type {

     protected Type type;

     public StorageProperties(Type type, Map<String, String> origProps) {
+        super(origProps);
         this.type = type;
-        normalizedAndCheckProps(origProps);
     }
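Note: together with the identical MetastoreProperties change above, this hunk moves property normalization out of the subclass constructors and into the CatalogProperties base constructor via super(origProps). A stand-in sketch of the resulting construction order (illustrative names, not the Doris classes):

import java.util.Map;

class BaseProps {
    protected final Map<String, String> origProps;

    protected BaseProps(Map<String, String> origProps) {
        this.origProps = origProps;
        bind();                        // stands in for normalizedAndCheckProps()
    }

    protected void bind() {
        // reflection-based binding, as in CatalogProperties.normalizedAndCheckProps()
    }
}

class StorageProps extends BaseProps {
    private final String type;

    StorageProps(String type, Map<String, String> origProps) {
        super(origProps);              // binding now happens once, in the base constructor
        this.type = type;
    }
}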
