diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationConstants.java b/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationConstants.java
index 027a1ac816a7bc..0ce22454859275 100644
--- a/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationConstants.java
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationConstants.java
@@ -98,6 +98,7 @@ public class CloudConfigurationConstants {
     public static final String HDFS_PASSWORD = "hadoop.password";
     public static final String HDFS_KERBEROS_PRINCIPAL_DEPRECATED = "kerberos_principal";
     public static final String HDFS_KERBEROS_PRINCIPAL = "hadoop.kerberos.principal";
+    public static final String HDFS_KERBEROS_TICKET_CACHE_PATH = "hadoop.security.kerberos.ticket.cache.path";
     @Deprecated
     public static final String HDFS_KERBEROS_KEYTAB_DEPRECATED = "kerberos_keytab";
     public static final String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab";
diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationFactory.java b/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationFactory.java
index f89e2963072346..a61ac84f20da31 100644
--- a/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationFactory.java
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/CloudConfigurationFactory.java
@@ -21,6 +21,7 @@
 import com.starrocks.credential.azure.AzureCloudConfigurationProvider;
 import com.starrocks.credential.gcp.GCPCloudConfigurationProvoder;
 import com.starrocks.credential.hdfs.HDFSCloudConfigurationProvider;
+import com.starrocks.credential.hdfs.StrictHDFSCloudConfigurationProvider;
 import com.starrocks.credential.tencent.TencentCloudConfigurationProvider;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.iceberg.aws.AwsProperties;
@@ -37,15 +38,28 @@ public class CloudConfigurationFactory {
             new AliyunCloudConfigurationProvider(),
             new TencentCloudConfigurationProvider(),
             new HDFSCloudConfigurationProvider(),
-            new CloudConfigurationProvider() {
-                @Override
-                public CloudConfiguration build(Map<String, String> properties) {
-                    return new CloudConfiguration();
-                }
-            });
+            (Map<String, String> properties) -> new CloudConfiguration());
+
+    static ImmutableList<CloudConfigurationProvider> strictCloudConfigurationFactoryChain = ImmutableList.of(
+            new AWSCloudConfigurationProvider(),
+            new AzureCloudConfigurationProvider(),
+            new GCPCloudConfigurationProvoder(),
+            new AliyunCloudConfigurationProvider(),
+            new TencentCloudConfigurationProvider(),
+            new HDFSCloudConfigurationProvider(),
+            new StrictHDFSCloudConfigurationProvider(),
+            (Map<String, String> properties) -> new CloudConfiguration());

     public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties) {
-        for (CloudConfigurationProvider factory : cloudConfigurationFactoryChain) {
+        return buildCloudConfigurationForStorage(properties, false);
+    }
+
+    public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties, boolean strictMode) {
+        ImmutableList<CloudConfigurationProvider> factories = cloudConfigurationFactoryChain;
+        if (strictMode) {
+            factories = strictCloudConfigurationFactoryChain;
+        }
+        for (CloudConfigurationProvider factory : factories) {
             CloudConfiguration cloudConfiguration = factory.build(properties);
             if (cloudConfiguration != null) {
                 cloudConfiguration.loadCommonFields(properties);
diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudConfigurationProvider.java b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudConfigurationProvider.java
index 97063173a361b8..a6962892e600f1 100644
--- a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudConfigurationProvider.java
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudConfigurationProvider.java
@@ -14,7 +14,7 @@

 package com.starrocks.credential.hdfs;

-import autovalue.shaded.com.google.common.common.base.Preconditions;
+import com.google.common.base.Preconditions;
 import com.starrocks.credential.CloudConfiguration;
 import com.starrocks.credential.CloudConfigurationProvider;
@@ -35,7 +35,7 @@

 public class HDFSCloudConfigurationProvider implements CloudConfigurationProvider {

-    private static String getOrDefault(Map<String, String> prop, String... args) {
+    protected static String getOrDefault(Map<String, String> prop, String... args) {
         for (String k : args) {
             String v = prop.get(k);
             if (v != null) {
@@ -45,8 +45,7 @@ private static String getOrDefault(Map<String, String> prop, String... args) {
         return "";
     }

-    @Override
-    public CloudConfiguration build(Map<String, String> properties) {
+    protected Map<String, String> preprocessProperties(Map<String, String> properties) {
         Preconditions.checkNotNull(properties);

         Map<String, String> prop = new HashMap<>(properties);
@@ -59,6 +58,12 @@ public CloudConfiguration build(Map<String, String> properties) {
         for (String k : keys) {
             prop.remove(k);
         }
+        return prop;
+    }
+
+    @Override
+    public CloudConfiguration build(Map<String, String> properties) {
+        Map<String, String> prop = preprocessProperties(properties);

         HDFSCloudCredential hdfsCloudCredential = new HDFSCloudCredential(
                 getOrDefault(properties, HDFS_AUTHENTICATION),
@@ -72,7 +77,6 @@ public CloudConfiguration build(Map<String, String> properties) {
         if (!hdfsCloudCredential.validate()) {
             return null;
         }
-        HDFSCloudConfiguration conf = new HDFSCloudConfiguration(hdfsCloudCredential);
-        return conf;
+        return new HDFSCloudConfiguration(hdfsCloudCredential);
     }
 }
diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudCredential.java b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudCredential.java
index 72ad0eeee908db..216d3752b392ed 100644
--- a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudCredential.java
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/HDFSCloudCredential.java
@@ -23,9 +23,12 @@

 import java.util.Map;

+import static com.starrocks.credential.CloudConfigurationConstants.HDFS_AUTHENTICATION;
+
 public class HDFSCloudCredential implements CloudCredential {
-    public static final String EMPTY = "empty";
-    private String authentication;
+    public static final String SIMPLE_AUTH = "simple";
+    public static final String KERBEROS_AUTH = "kerberos";
+    protected String authentication;
     private String userName;
     private String password;
     private String krbPrincipal;
@@ -65,19 +68,14 @@ public void applyToConfiguration(Configuration configuration) {

     @Override
     public boolean validate() {
-        if (authentication.equals(EMPTY)) {
-            return true;
-        }
-
-        if (authentication.equals("simple")) {
+        if (SIMPLE_AUTH.equals(authentication)) {
             return true;
         }
-
-        if (authentication.equals("kerberos")) {
+        if (KERBEROS_AUTH.equals(authentication)) {
             if (krbPrincipal.isEmpty()) {
                 return false;
             }
-            return !(krbKeyTabData.isEmpty() && krbKeyTabFile.isEmpty());
+            return !(krbKeyTabFile.isEmpty() && krbKeyTabData.isEmpty());
         }

         return false;
@@ -104,6 +102,13 @@ public FileStoreInfo toFileStoreInfo() {
         FileStoreInfo.Builder fileStore = FileStoreInfo.newBuilder();
         fileStore.setFsType(FileStoreType.HDFS);
         HDFSFileStoreInfo.Builder hdfsFileStoreInfo = HDFSFileStoreInfo.newBuilder();
+        if (!authentication.isEmpty()) {
+            hdfsFileStoreInfo.putConfiguration(HDFS_AUTHENTICATION, authentication);
+            if (authentication.equals(SIMPLE_AUTH) && !userName.isEmpty()) {
+                hdfsFileStoreInfo.setUsername(userName);
+            }
+        }
+        hdfsFileStoreInfo.putAllConfiguration(hadoopConfiguration);
         fileStore.setHdfsFsInfo(hdfsFileStoreInfo.build());
         return fileStore.build();
     }
diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudConfigurationProvider.java b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudConfigurationProvider.java
new file mode 100644
index 00000000000000..5b595b268946ab
--- /dev/null
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudConfigurationProvider.java
@@ -0,0 +1,47 @@
+// Copyright 2021-present StarRocks, Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.starrocks.credential.hdfs;
+
+import com.starrocks.credential.CloudConfiguration;
+import com.starrocks.credential.CloudConfigurationConstants;
+
+import java.util.Map;
+
+public class StrictHDFSCloudConfigurationProvider extends HDFSCloudConfigurationProvider {
+    @Override
+    public CloudConfiguration build(Map<String, String> properties) {
+        Map<String, String> prop = preprocessProperties(properties);
+
+        HDFSCloudCredential hdfsCloudCredential = new StrictHDFSCloudCredential(
+                getOrDefault(properties, CloudConfigurationConstants.HDFS_AUTHENTICATION),
+                getOrDefault(properties, CloudConfigurationConstants.HDFS_USERNAME,
+                        CloudConfigurationConstants.HDFS_USERNAME_DEPRECATED),
+                getOrDefault(properties, CloudConfigurationConstants.HDFS_PASSWORD,
+                        CloudConfigurationConstants.HDFS_PASSWORD_DEPRECATED),
+                getOrDefault(properties, CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL,
+                        CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL_DEPRECATED),
+                getOrDefault(properties, CloudConfigurationConstants.HADOOP_KERBEROS_KEYTAB,
+                        CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_DEPRECATED),
+                getOrDefault(properties, CloudConfigurationConstants.HADOOP_KERBEROS_KEYTAB_CONTENT,
+                        CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_CONTENT_DEPRECATED),
+                prop
+        );
+        if (!hdfsCloudCredential.validate()) {
+            return null;
+        }
+
+        return new HDFSCloudConfiguration(hdfsCloudCredential);
+    }
+}
diff --git a/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudCredential.java b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudCredential.java
new file mode 100644
index 00000000000000..b89d31f83a01c7
--- /dev/null
+++ b/fe/fe-core/src/main/java/com/starrocks/credential/hdfs/StrictHDFSCloudCredential.java
@@ -0,0 +1,32 @@
+// Copyright 2021-present StarRocks, Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.starrocks.credential.hdfs;
+
+import java.util.Map;
+
+public class StrictHDFSCloudCredential extends HDFSCloudCredential {
+    public StrictHDFSCloudCredential(String authentication, String username, String password, String kerberosPrincipal,
+                                     String keytab, String keytabContent, Map<String, String> hadoopConfiguration) {
+        super(authentication, username, password, kerberosPrincipal, keytab, keytabContent, hadoopConfiguration);
+    }
+
+    @Override
+    public boolean validate() {
+        if (!authentication.isEmpty()) {
+            return authentication.equals(SIMPLE_AUTH) || authentication.equals(KERBEROS_AUTH);
+        }
+        return true;
+    }
+}
diff --git a/fe/fe-core/src/main/java/com/starrocks/storagevolume/StorageVolume.java b/fe/fe-core/src/main/java/com/starrocks/storagevolume/StorageVolume.java
index 79a965b60fd9fa..d5aeb1228acdad 100644
--- a/fe/fe-core/src/main/java/com/starrocks/storagevolume/StorageVolume.java
+++ b/fe/fe-core/src/main/java/com/starrocks/storagevolume/StorageVolume.java
@@ -21,6 +21,7 @@
 import com.staros.proto.AzBlobCredentialInfo;
 import com.staros.proto.AzBlobFileStoreInfo;
 import com.staros.proto.FileStoreInfo;
+import com.staros.proto.HDFSFileStoreInfo;
 import com.staros.proto.S3FileStoreInfo;
 import com.starrocks.common.io.Text;
 import com.starrocks.common.io.Writable;
@@ -29,7 +30,6 @@
 import com.starrocks.credential.CloudConfigurationConstants;
 import com.starrocks.credential.CloudConfigurationFactory;
 import com.starrocks.credential.CloudType;
-import com.starrocks.credential.hdfs.HDFSCloudCredential;
 import com.starrocks.persist.gson.GsonPostProcessable;
 import com.starrocks.persist.gson.GsonUtils;
 import com.starrocks.server.GlobalStateMgr;
@@ -44,9 +44,6 @@
 import java.util.List;
 import java.util.Map;

-import static com.starrocks.credential.CloudConfigurationConstants.AZURE_BLOB_CONTAINER;
-import static com.starrocks.credential.CloudConfigurationConstants.HDFS_AUTHENTICATION;
-
 public class StorageVolume implements Writable, GsonPostProcessable {
     public enum StorageVolumeType {
         UNKNOWN,
@@ -93,7 +90,7 @@ public StorageVolume(String id, String name, String svt, List<String> locations,
         this.params = new HashMap<>(params);
         Map<String, String> configurationParams = new HashMap<>(params);
         preprocessAuthenticationIfNeeded(configurationParams);
-        this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(configurationParams);
+        this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(configurationParams, true);
         if (!isValidCloudConfiguration()) {
             Gson gson = new Gson();
             throw new SemanticException("Storage params is not valid " + gson.toJson(params));
@@ -107,7 +104,7 @@ public StorageVolume(StorageVolume sv) {
         this.locations = new ArrayList<>(sv.locations);
         this.comment = sv.comment;
         this.enabled = sv.enabled;
-        this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(sv.params);
+        this.cloudConfiguration = CloudConfigurationFactory.buildCloudConfigurationForStorage(sv.params, true);
         this.params = new HashMap<>(sv.params);
     }
@@ -249,7 +246,12 @@ public static Map<String, String> getParamsFromFileStoreInfo(FileStoreInfo fsInfo) {
             }
             return params;
         case HDFS:
-            // TODO
+            HDFSFileStoreInfo hdfsFileStoreInfo = fsInfo.getHdfsFsInfo();
+            params.putAll(hdfsFileStoreInfo.getConfigurationMap());
+            String userName = hdfsFileStoreInfo.getUsername();
+            if (!Strings.isNullOrEmpty(userName)) {
+                params.put(CloudConfigurationConstants.HDFS_USERNAME_DEPRECATED, userName);
+            }
             return params;
         case AZBLOB:
             AzBlobFileStoreInfo azBlobFileStoreInfo = fsInfo.getAzblobFsInfo();
@@ -270,11 +272,9 @@ public static Map<String, String> getParamsFromFileStoreInfo(FileStoreInfo fsInfo) {
     }

     private void preprocessAuthenticationIfNeeded(Map<String, String> params) {
-        if (svt == StorageVolumeType.HDFS) {
-            params.computeIfAbsent(HDFS_AUTHENTICATION, key -> HDFSCloudCredential.EMPTY);
-        } else if (svt == StorageVolumeType.AZBLOB) {
+        if (svt == StorageVolumeType.AZBLOB) {
             String container = locations.get(0).split("/")[0];
-            params.put(AZURE_BLOB_CONTAINER, container);
+            params.put(CloudConfigurationConstants.AZURE_BLOB_CONTAINER, container);
         }
     }
diff --git a/fe/fe-core/src/test/java/com/starrocks/credential/CloudConfigurationFactoryTest.java b/fe/fe-core/src/test/java/com/starrocks/credential/CloudConfigurationFactoryTest.java
index 07552b96f7b608..f3bc8f2fa771cf 100644
--- a/fe/fe-core/src/test/java/com/starrocks/credential/CloudConfigurationFactoryTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/credential/CloudConfigurationFactoryTest.java
@@ -206,6 +206,13 @@ public void testHDFSCloudConfiguration() {
         Assert.assertEquals(cc.toConfString(),
                 "HDFSCloudConfiguration{resources='', jars='', cred=HDFSCloudCredential{authentication='simple', username='XX',"
                         + " password='XX', krbPrincipal='', krbKeyTabFile='', krbKeyTabData=''}}");
+
+        map.clear();
+        cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map);
+        Assert.assertEquals(CloudType.DEFAULT, cc.getCloudType());
+
+        cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map, true);
+        Assert.assertEquals(CloudType.HDFS, cc.getCloudType());
     }

     @Test
diff --git a/fe/fe-core/src/test/java/com/starrocks/storagevolume/StorageVolumeTest.java b/fe/fe-core/src/test/java/com/starrocks/storagevolume/StorageVolumeTest.java
index fb4c1f7a3d0472..46759567dd3221 100644
--- a/fe/fe-core/src/test/java/com/starrocks/storagevolume/StorageVolumeTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/storagevolume/StorageVolumeTest.java
@@ -22,6 +22,7 @@
 import com.staros.proto.AzBlobFileStoreInfo;
 import com.staros.proto.FileStoreInfo;
 import com.staros.proto.FileStoreType;
+import com.staros.proto.HDFSFileStoreInfo;
 import com.staros.proto.S3FileStoreInfo;
 import com.starrocks.common.AnalysisException;
 import com.starrocks.common.DdlException;
@@ -63,9 +64,11 @@
 import static com.starrocks.credential.CloudConfigurationConstants.AZURE_BLOB_ENDPOINT;
 import static com.starrocks.credential.CloudConfigurationConstants.AZURE_BLOB_SAS_TOKEN;
 import static com.starrocks.credential.CloudConfigurationConstants.AZURE_BLOB_SHARED_KEY;
-import static com.starrocks.credential.CloudConfigurationConstants.HADOOP_KERBEROS_KEYTAB;
 import static com.starrocks.credential.CloudConfigurationConstants.HDFS_AUTHENTICATION;
-import static com.starrocks.credential.CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL;
+import static com.starrocks.credential.CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_CONTENT_DEPRECATED;
+import static com.starrocks.credential.CloudConfigurationConstants.HDFS_KERBEROS_KEYTAB_DEPRECATED;
+import static com.starrocks.credential.CloudConfigurationConstants.HDFS_KERBEROS_PRINCIPAL_DEPRECATED;
+import static com.starrocks.credential.CloudConfigurationConstants.HDFS_KERBEROS_TICKET_CACHE_PATH;
 import static com.starrocks.credential.CloudConfigurationConstants.HDFS_PASSWORD;
 import static com.starrocks.credential.CloudConfigurationConstants.HDFS_USERNAME;
@@ -195,7 +198,7 @@ public void testAWSInvalidCredential() {
     @Test
     public void testHDFSSimpleCredential() {
         Map<String, String> storageParams = new HashMap<>();
-        storageParams.put(HDFS_AUTHENTICATION, "simple");
+        storageParams.put(HDFS_AUTHENTICATION, HDFSCloudCredential.SIMPLE_AUTH);
         storageParams.put(HDFS_USERNAME, "username");
         storageParams.put(HDFS_PASSWORD, "password");
         storageParams.put("dfs.nameservices", "ha_cluster");
@@ -210,11 +213,25 @@ public void testHDFSSimpleCredential() {
         CloudConfiguration cloudConfiguration = sv.getCloudConfiguration();
         Assert.assertEquals(CloudType.HDFS, cloudConfiguration.getCloudType());
         HDFSCloudConfiguration hdfsCloudConfiguration = (HDFSCloudConfiguration) cloudConfiguration;
-        Assert.assertEquals("simple", hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
+        Assert.assertEquals(HDFSCloudCredential.SIMPLE_AUTH, hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
         Assert.assertEquals(5, hdfsCloudConfiguration.getHdfsCloudCredential().getHadoopConfiguration().size());
+        FileStoreInfo fileStore = cloudConfiguration.toFileStoreInfo();
+        Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
+        Assert.assertTrue(fileStore.hasHdfsFsInfo());
+        HDFSFileStoreInfo hdfsFileStoreInfo = fileStore.getHdfsFsInfo();
+        Assert.assertEquals("username", hdfsFileStoreInfo.getUsername());
+        Assert.assertEquals("simple", hdfsFileStoreInfo.getConfigurationMap().get(HDFS_AUTHENTICATION));
+        Assert.assertEquals("ha_cluster", hdfsFileStoreInfo.getConfigurationMap().get("dfs.nameservices"));
+        Assert.assertEquals("ha_n1,ha_n2", hdfsFileStoreInfo.getConfigurationMap().get("dfs.ha.namenodes.ha_cluster"));
+        Assert.assertEquals(":",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.namenode.rpc-address.ha_cluster.ha_n1"));
+        Assert.assertEquals(":",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.namenode.rpc-address.ha_cluster.ha_n2"));
+        Assert.assertEquals("org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.client.failover.proxy.provider"));

         Map<String, String> storageParams1 = new HashMap<>();
-        storageParams1.put(HDFS_AUTHENTICATION, "simple");
+        storageParams1.put(HDFS_AUTHENTICATION, HDFSCloudCredential.SIMPLE_AUTH);
         storageParams1.put(HDFS_USERNAME, "username");
         storageParams1.put(HDFS_PASSWORD, "password");
         sv = new StorageVolume("2", "test", "hdfs", Arrays.asList("hdfs://abc"),
@@ -222,16 +239,22 @@
         cloudConfiguration = sv.getCloudConfiguration();
         Assert.assertEquals(CloudType.HDFS, cloudConfiguration.getCloudType());
         hdfsCloudConfiguration = (HDFSCloudConfiguration) cloudConfiguration;
-        Assert.assertEquals("simple", hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
+        Assert.assertEquals(HDFSCloudCredential.SIMPLE_AUTH, hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
         Assert.assertEquals(0, hdfsCloudConfiguration.getHdfsCloudCredential().getHadoopConfiguration().size());
+        fileStore = cloudConfiguration.toFileStoreInfo();
+        Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
+        Assert.assertTrue(fileStore.hasHdfsFsInfo());
+        hdfsFileStoreInfo = fileStore.getHdfsFsInfo();
+        Assert.assertEquals("username", hdfsFileStoreInfo.getUsername());
     }

     @Test
     public void testHDFSKerberosCredential() throws AnalysisException {
         Map<String, String> storageParams = new HashMap<>();
-        storageParams.put(HDFS_AUTHENTICATION, "kerberos");
-        storageParams.put(HDFS_KERBEROS_PRINCIPAL, "nn/abc@ABC.COM");
-        storageParams.put(HADOOP_KERBEROS_KEYTAB, "/keytab/hive.keytab");
+        storageParams.put(HDFS_AUTHENTICATION, HDFSCloudCredential.KERBEROS_AUTH);
+        storageParams.put(HDFS_KERBEROS_PRINCIPAL_DEPRECATED, "nn/abc@ABC.COM");
+        storageParams.put(HDFS_KERBEROS_KEYTAB_DEPRECATED, "/keytab/hive.keytab");
+        storageParams.put(HDFS_KERBEROS_KEYTAB_CONTENT_DEPRECATED, "YWFhYWFh");
         storageParams.put("dfs.nameservices", "ha_cluster");
         storageParams.put("dfs.ha.namenodes.ha_cluster", "ha_n1,ha_n2");
         storageParams.put("dfs.namenode.rpc-address.ha_cluster.ha_n1", ":");
@@ -246,6 +269,39 @@
         HDFSCloudConfiguration hdfsCloudConfiguration = (HDFSCloudConfiguration) cloudConfiguration;
         Assert.assertEquals("kerberos", hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
         Assert.assertEquals(5, hdfsCloudConfiguration.getHdfsCloudCredential().getHadoopConfiguration().size());
+        FileStoreInfo fileStore = cloudConfiguration.toFileStoreInfo();
+        Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
+        Assert.assertTrue(fileStore.hasHdfsFsInfo());
+        HDFSFileStoreInfo hdfsFileStoreInfo = fileStore.getHdfsFsInfo();
+        Assert.assertEquals(HDFSCloudCredential.KERBEROS_AUTH, hdfsFileStoreInfo.getConfigurationMap().get(HDFS_AUTHENTICATION));
+        Assert.assertEquals(5, hdfsCloudConfiguration.getHdfsCloudCredential().getHadoopConfiguration().size());
+        Assert.assertEquals("ha_cluster", hdfsFileStoreInfo.getConfigurationMap().get("dfs.nameservices"));
+        Assert.assertEquals("ha_n1,ha_n2", hdfsFileStoreInfo.getConfigurationMap().get("dfs.ha.namenodes.ha_cluster"));
+        Assert.assertEquals(":",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.namenode.rpc-address.ha_cluster.ha_n1"));
+        Assert.assertEquals(":",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.namenode.rpc-address.ha_cluster.ha_n2"));
+        Assert.assertEquals("org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
+                hdfsFileStoreInfo.getConfigurationMap().get("dfs.client.failover.proxy.provider"));
+
+        storageParams.clear();
+        storageParams.put(HDFS_AUTHENTICATION, HDFSCloudCredential.KERBEROS_AUTH);
+        storageParams.put(HDFS_KERBEROS_TICKET_CACHE_PATH, "/path/to/ticket/cache/path");
+        sv = new StorageVolume("1", "test", "hdfs", Arrays.asList("hdfs://abc"),
+                storageParams, true, "");
+        cloudConfiguration = sv.getCloudConfiguration();
+        Assert.assertEquals(CloudType.HDFS, cloudConfiguration.getCloudType());
+        hdfsCloudConfiguration = (HDFSCloudConfiguration) cloudConfiguration;
+        Assert.assertEquals(HDFSCloudCredential.KERBEROS_AUTH,
+                hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication());
+        Assert.assertEquals(1, hdfsCloudConfiguration.getHdfsCloudCredential().getHadoopConfiguration().size());
+        fileStore = cloudConfiguration.toFileStoreInfo();
+        Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
+        Assert.assertTrue(fileStore.hasHdfsFsInfo());
+        hdfsFileStoreInfo = fileStore.getHdfsFsInfo();
+        Assert.assertEquals("kerberos", hdfsFileStoreInfo.getConfigurationMap().get(HDFS_AUTHENTICATION));
+        Assert.assertEquals("/path/to/ticket/cache/path",
+                hdfsFileStoreInfo.getConfigurationMap().get(HDFS_KERBEROS_TICKET_CACHE_PATH));
     }

     @Test
@@ -256,12 +312,30 @@ public void testHDFSEmptyCredential() {
         CloudConfiguration cloudConfiguration = sv.getCloudConfiguration();
         Assert.assertEquals(CloudType.HDFS, cloudConfiguration.getCloudType());
         HDFSCloudConfiguration hdfsCloudConfiguration = (HDFSCloudConfiguration) cloudConfiguration;
-        Assert.assertEquals(hdfsCloudConfiguration.getHdfsCloudCredential().getAuthentication(), HDFSCloudCredential.EMPTY);
         FileStoreInfo fileStore = cloudConfiguration.toFileStoreInfo();
         Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
         Assert.assertTrue(fileStore.hasHdfsFsInfo());
     }

+    @Test
+    public void testHDFSViewFS() {
+        Map<String, String> storageParams = new HashMap<>();
+        storageParams.put("fs.viewfs.mounttable.ClusterX.link./data", "hdfs://nn1-clusterx.example.com:8020/data");
+        storageParams.put("fs.viewfs.mounttable.ClusterX.link./project", "hdfs://nn2-clusterx.example.com:8020/project");
+        StorageVolume sv = new StorageVolume("1", "test", "hdfs", Arrays.asList("hdfs://abc"),
+                storageParams, true, "");
+        CloudConfiguration cloudConfiguration = sv.getCloudConfiguration();
+        Assert.assertEquals(CloudType.HDFS, cloudConfiguration.getCloudType());
+        FileStoreInfo fileStore = cloudConfiguration.toFileStoreInfo();
+        Assert.assertEquals(FileStoreType.HDFS, fileStore.getFsType());
+        Assert.assertTrue(fileStore.hasHdfsFsInfo());
+        HDFSFileStoreInfo hdfsFileStoreInfo = fileStore.getHdfsFsInfo();
+        Assert.assertEquals("hdfs://nn1-clusterx.example.com:8020/data",
+                hdfsFileStoreInfo.getConfigurationMap().get("fs.viewfs.mounttable.ClusterX.link./data"));
+        Assert.assertEquals("hdfs://nn2-clusterx.example.com:8020/project",
+                hdfsFileStoreInfo.getConfigurationMap().get("fs.viewfs.mounttable.ClusterX.link./project"));
+    }
+
     @Test
     public void testHDFSAddConfigResources() {
         String runningDir = MockedFrontend.getInstance().getRunningDir();
@@ -388,6 +462,26 @@ public void testFromFileStoreInfo() {
         fs = FileStoreInfo.newBuilder().setS3FsInfo(s3fs).setFsKey("0").setFsType(FileStoreType.S3).build();
         sv = StorageVolume.fromFileStoreInfo(fs);
         Assert.assertEquals(CloudType.AWS, sv.getCloudConfiguration().getCloudType());
+
+        HDFSFileStoreInfo hdfs = HDFSFileStoreInfo.newBuilder().setUsername("username")
+                .putConfiguration(HDFS_AUTHENTICATION, "simple")
+                .putConfiguration("dfs.nameservices", "ha_cluster")
+                .putConfiguration("dfs.ha.namenodes.ha_cluster", "ha_n1,ha_n2")
+                .putConfiguration("dfs.namenode.rpc-address.ha_cluster.ha_n1", ":")
+                .putConfiguration("dfs.namenode.rpc-address.ha_cluster.ha_n2", ":")
+                .putConfiguration("dfs.client.failover.proxy.provider",
+                        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider").build();
+        fs = FileStoreInfo.newBuilder().setHdfsFsInfo(hdfs).setFsKey("0").setFsType(FileStoreType.HDFS).build();
+        sv = StorageVolume.fromFileStoreInfo(fs);
+        Assert.assertEquals(CloudType.HDFS, sv.getCloudConfiguration().getCloudType());
+
+        hdfs = HDFSFileStoreInfo.newBuilder().putConfiguration(HDFS_AUTHENTICATION, "kerberos")
+                .putConfiguration(HDFS_KERBEROS_PRINCIPAL_DEPRECATED, "nn/abc@ABC.COM")
+                .putConfiguration(HDFS_KERBEROS_KEYTAB_DEPRECATED, "/keytab/hive.keytab")
+                .putConfiguration(HDFS_KERBEROS_KEYTAB_CONTENT_DEPRECATED, "YWFhYWFh").build();
+        fs = FileStoreInfo.newBuilder().setHdfsFsInfo(hdfs).setFsKey("0").setFsType(FileStoreType.HDFS).build();
+        sv = StorageVolume.fromFileStoreInfo(fs);
+        Assert.assertEquals(CloudType.HDFS, sv.getCloudConfiguration().getCloudType());
     }

     @Test