Skip to content

Commit

Permalink
[Feature] Storage volume support hdfs configuration (StarRocks#33004)
Browse files Browse the repository at this point in the history
Signed-off-by: Zijie Lu <[email protected]>
Signed-off-by: Moonm3n <[email protected]>
  • Loading branch information
TszKitLo40 authored and Moonm3n committed Oct 31, 2023
1 parent 6ffc788 commit bba8c98
Show file tree
Hide file tree
Showing 9 changed files with 248 additions and 44 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ public class CloudConfigurationConstants {
public static final String HDFS_PASSWORD = "hadoop.password";
public static final String HDFS_KERBEROS_PRINCIPAL_DEPRECATED = "kerberos_principal";
public static final String HDFS_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
public static final String HDFS_KERBEROS_TICKET_CACHE_PATH = "hadoop.security.kerberos.ticket.cache.path";
@Deprecated
public static final String HDFS_KERBEROS_KEYTAB_DEPRECATED = "kerberos_keytab";
public static final String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.starrocks.credential.azure.AzureCloudConfigurationProvider;
import com.starrocks.credential.gcp.GCPCloudConfigurationProvoder;
import com.starrocks.credential.hdfs.HDFSCloudConfigurationProvider;
import com.starrocks.credential.hdfs.StrictHDFSCloudConfigurationProvider;
import com.starrocks.credential.tencent.TencentCloudConfigurationProvider;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.iceberg.aws.AwsProperties;
Expand All @@ -37,15 +38,28 @@ public class CloudConfigurationFactory {
new AliyunCloudConfigurationProvider(),
new TencentCloudConfigurationProvider(),
new HDFSCloudConfigurationProvider(),
new CloudConfigurationProvider() {
@Override
public CloudConfiguration build(Map<String, String> properties) {
return new CloudConfiguration();
}
});
(Map<String, String> properties) -> new CloudConfiguration());

static ImmutableList<CloudConfigurationProvider> strictCloudConfigurationFactoryChain = ImmutableList.of(
new AWSCloudConfigurationProvider(),
new AzureCloudConfigurationProvider(),
new GCPCloudConfigurationProvoder(),
new AliyunCloudConfigurationProvider(),
new TencentCloudConfigurationProvider(),
new HDFSCloudConfigurationProvider(),
new StrictHDFSCloudConfigurationProvider(),
(Map<String, String> properties) -> new CloudConfiguration());

public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties) {
for (CloudConfigurationProvider factory : cloudConfigurationFactoryChain) {
return buildCloudConfigurationForStorage(properties, false);
}

public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties, boolean strictMode) {
ImmutableList<CloudConfigurationProvider> factories = cloudConfigurationFactoryChain;
if (strictMode) {
factories = strictCloudConfigurationFactoryChain;
}
for (CloudConfigurationProvider factory : factories) {
CloudConfiguration cloudConfiguration = factory.build(properties);
if (cloudConfiguration != null) {
cloudConfiguration.loadCommonFields(properties);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

package com.starrocks.credential.hdfs;

import autovalue.shaded.com.google.common.common.base.Preconditions;
import com.google.common.base.Preconditions;
import com.starrocks.credential.CloudConfiguration;
import com.starrocks.credential.CloudConfigurationProvider;

Expand All @@ -35,7 +35,7 @@

public class HDFSCloudConfigurationProvider implements CloudConfigurationProvider {

private static String getOrDefault(Map<String, String> prop, String... args) {
protected static String getOrDefault(Map<String, String> prop, String... args) {
for (String k : args) {
String v = prop.get(k);
if (v != null) {
Expand All @@ -45,8 +45,7 @@ private static String getOrDefault(Map<String, String> prop, String... args) {
return "";
}

@Override
public CloudConfiguration build(Map<String, String> properties) {
protected Map<String, String> preprocessProperties(Map<String, String> properties) {
Preconditions.checkNotNull(properties);
Map<String, String> prop = new HashMap<>(properties);

Expand All @@ -59,6 +58,12 @@ public CloudConfiguration build(Map<String, String> properties) {
for (String k : keys) {
prop.remove(k);
}
return prop;
}

@Override
public CloudConfiguration build(Map<String, String> properties) {
Map<String, String> prop = preprocessProperties(properties);

HDFSCloudCredential hdfsCloudCredential = new HDFSCloudCredential(
getOrDefault(properties, HDFS_AUTHENTICATION),
Expand All @@ -72,7 +77,6 @@ public CloudConfiguration build(Map<String, String> properties) {
if (!hdfsCloudCredential.validate()) {
return null;
}
HDFSCloudConfiguration conf = new HDFSCloudConfiguration(hdfsCloudCredential);
return conf;
return new HDFSCloudConfiguration(hdfsCloudCredential);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,12 @@

import java.util.Map;

import static com.starrocks.credential.CloudConfigurationConstants.HDFS_AUTHENTICATION;

public class HDFSCloudCredential implements CloudCredential {
public static final String EMPTY = "empty";
private String authentication;
public static final String SIMPLE_AUTH = "simple";
public static final String KERBEROS_AUTH = "kerberos";
protected String authentication;
private String userName;
private String password;
private String krbPrincipal;
Expand Down Expand Up @@ -65,19 +68,14 @@ public void applyToConfiguration(Configuration configuration) {

@Override
public boolean validate() {
if (authentication.equals(EMPTY)) {
return true;
}

if (authentication.equals("simple")) {
if (SIMPLE_AUTH.equals(authentication)) {
return true;
}

if (authentication.equals("kerberos")) {
if (KERBEROS_AUTH.equals(authentication)) {
if (krbPrincipal.isEmpty()) {
return false;
}
return !(krbKeyTabData.isEmpty() && krbKeyTabFile.isEmpty());
return !(krbKeyTabFile.isEmpty() && krbKeyTabData.isEmpty());
}

return false;
Expand All @@ -104,6 +102,13 @@ public FileStoreInfo toFileStoreInfo() {
FileStoreInfo.Builder fileStore = FileStoreInfo.newBuilder();
fileStore.setFsType(FileStoreType.HDFS);
HDFSFileStoreInfo.Builder hdfsFileStoreInfo = HDFSFileStoreInfo.newBuilder();
if (!authentication.isEmpty()) {
hdfsFileStoreInfo.putConfiguration(HDFS_AUTHENTICATION, authentication);
if (authentication.equals(SIMPLE_AUTH) && !userName.isEmpty()) {
hdfsFileStoreInfo.setUsername(userName);
}
}
hdfsFileStoreInfo.putAllConfiguration(hadoopConfiguration);
fileStore.setHdfsFsInfo(hdfsFileStoreInfo.build());
return fileStore.build();
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
// Copyright 2021-present StarRocks, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.starrocks.credential.hdfs;

import com.starrocks.credential.CloudConfiguration;
import com.starrocks.credential.CloudConfigurationConstants;

import java.util.Map;

/**
 * HDFS cloud-configuration provider used in strict mode (e.g. storage volumes):
 * it builds a {@link StrictHDFSCloudCredential}, whose validation requires any
 * supplied authentication type to be an explicitly supported one.
 */
public class StrictHDFSCloudConfigurationProvider extends HDFSCloudConfigurationProvider {
    @Override
    public CloudConfiguration build(Map<String, String> properties) {
        // Strip credential-specific keys; the remainder is passed through as raw Hadoop configuration.
        Map<String, String> hadoopConf = preprocessProperties(properties);

        // Each value is resolved from the current key first, then its deprecated alias.
        String authentication = getOrDefault(properties, CloudConfigurationConstants.HDFS_AUTHENTICATION);
        String username = getOrDefault(properties, CloudConfigurationConstants.HDFS_USERNAME,
                CloudConfigurationConstants.HDFS_USERNAME_DEPRECATED);
        String password = getOrDefault(properties, CloudConfigurationConstants.HDFS_PASSWORD,
                CloudConfigurationConstants.HDFS_PASSWORD_DEPRECATED);
        String kerberosPrincipal = getOrDefault(properties, CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL,
                CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL_DEPRECATED);
        String keytab = getOrDefault(properties, CloudConfigurationConstants.HADOOP_KERBEROS_KEYTAB,
                CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_DEPRECATED);
        String keytabContent = getOrDefault(properties, CloudConfigurationConstants.HADOOP_KERBEROS_KEYTAB_CONTENT,
                CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_CONTENT_DEPRECATED);

        HDFSCloudCredential credential = new StrictHDFSCloudCredential(
                authentication, username, password, kerberosPrincipal, keytab, keytabContent, hadoopConf);

        // An invalid credential means "not an HDFS configuration"; the factory chain falls through.
        return credential.validate() ? new HDFSCloudConfiguration(credential) : null;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
// Copyright 2021-present StarRocks, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.starrocks.credential.hdfs;

import java.util.Map;

/**
 * HDFS credential with strict validation: if an authentication type is given at
 * all, it must be one of the supported values ({@code simple} or {@code kerberos});
 * an empty authentication is accepted as-is.
 */
public class StrictHDFSCloudCredential extends HDFSCloudCredential {
    public StrictHDFSCloudCredential(String authentication, String username, String password, String kerberosPrincipal,
                                     String keytab, String keytabContent, Map<String, String> hadoopConfiguration) {
        super(authentication, username, password, kerberosPrincipal, keytab, keytabContent, hadoopConfiguration);
    }

    @Override
    public boolean validate() {
        // No authentication specified is fine; otherwise only the known auth modes pass.
        if (authentication.isEmpty()) {
            return true;
        }
        return SIMPLE_AUTH.equals(authentication) || KERBEROS_AUTH.equals(authentication);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.staros.proto.AzBlobCredentialInfo;
import com.staros.proto.AzBlobFileStoreInfo;
import com.staros.proto.FileStoreInfo;
import com.staros.proto.HDFSFileStoreInfo;
import com.staros.proto.S3FileStoreInfo;
import com.starrocks.common.io.Text;
import com.starrocks.common.io.Writable;
Expand All @@ -29,7 +30,6 @@
import com.starrocks.credential.CloudConfigurationConstants;
import com.starrocks.credential.CloudConfigurationFactory;
import com.starrocks.credential.CloudType;
import com.starrocks.credential.hdfs.HDFSCloudCredential;
import com.starrocks.persist.gson.GsonPostProcessable;
import com.starrocks.persist.gson.GsonUtils;
import com.starrocks.server.GlobalStateMgr;
Expand All @@ -44,9 +44,6 @@
import java.util.List;
import java.util.Map;

import static com.starrocks.credential.CloudConfigurationConstants.AZURE_BLOB_CONTAINER;
import static com.starrocks.credential.CloudConfigurationConstants.HDFS_AUTHENTICATION;

public class StorageVolume implements Writable, GsonPostProcessable {
public enum StorageVolumeType {
UNKNOWN,
Expand Down Expand Up @@ -93,7 +90,7 @@ public StorageVolume(String id, String name, String svt, List<String> locations,
this.params = new HashMap<>(params);
Map<String, String> configurationParams = new HashMap<>(params);
preprocessAuthenticationIfNeeded(configurationParams);
this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(configurationParams);
this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(configurationParams, true);
if (!isValidCloudConfiguration()) {
Gson gson = new Gson();
throw new SemanticException("Storage params is not valid " + gson.toJson(params));
Expand All @@ -107,7 +104,7 @@ public StorageVolume(StorageVolume sv) {
this.locations = new ArrayList<>(sv.locations);
this.comment = sv.comment;
this.enabled = sv.enabled;
this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(sv.params);
this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(sv.params, true);
this.params = new HashMap<>(sv.params);
}

Expand Down Expand Up @@ -249,7 +246,12 @@ public static Map<String, String> getParamsFromFileStoreInfo(FileStoreInfo fsInf
}
return params;
case HDFS:
// TODO
HDFSFileStoreInfo hdfsFileStoreInfo = fsInfo.getHdfsFsInfo();
params.putAll(hdfsFileStoreInfo.getConfigurationMap());
String userName = hdfsFileStoreInfo.getUsername();
if (!Strings.isNullOrEmpty(userName)) {
params.put(CloudConfigurationConstants.HDFS_USERNAME_DEPRECATED, userName);
}
return params;
case AZBLOB:
AzBlobFileStoreInfo azBlobFileStoreInfo = fsInfo.getAzblobFsInfo();
Expand All @@ -270,11 +272,9 @@ public static Map<String, String> getParamsFromFileStoreInfo(FileStoreInfo fsInf
}

private void preprocessAuthenticationIfNeeded(Map<String, String> params) {
if (svt == StorageVolumeType.HDFS) {
params.computeIfAbsent(HDFS_AUTHENTICATION, key -> HDFSCloudCredential.EMPTY);
} else if (svt == StorageVolumeType.AZBLOB) {
if (svt == StorageVolumeType.AZBLOB) {
String container = locations.get(0).split("/")[0];
params.put(AZURE_BLOB_CONTAINER, container);
params.put(CloudConfigurationConstants.AZURE_BLOB_CONTAINER, container);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,13 @@ public void testHDFSCloudConfiguration() {
Assert.assertEquals(cc.toConfString(),
"HDFSCloudConfiguration{resources='', jars='', cred=HDFSCloudCredential{authentication='simple', username='XX'," +
" password='XX', krbPrincipal='', krbKeyTabFile='', krbKeyTabData=''}}");

map.clear();
cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map);
Assert.assertEquals(CloudType.DEFAULT, cc.getCloudType());

cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map, true);
Assert.assertEquals(CloudType.HDFS, cc.getCloudType());
}

@Test
Expand Down
Loading

0 comments on commit bba8c98

Please sign in to comment.