import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
public class HdfsKerberosDemo {

    // All downloaded Kerberos artifacts (krb5.conf, keytab) land in this directory.
    private static final String TEMP_DIR = "/root/temp/";

    /**
     * Demo: authenticate against a Kerberized HDFS cluster using a keytab and
     * recursively list the files under /data.
     *
     * Equivalent VM options:
     *   -Djava.security.krb5.conf=/home/xxx/kerberos/krb5.conf
     *   -Dkeytab.path=/home/xxx/kerberos/hive.service.keytab
     *
     * @param args unused
     * @throws IOException if HDFS access fails after successful login
     */
    public static void main(String[] args) throws IOException {
        // Fetch krb5.conf and expose it to the JVM's Kerberos machinery.
        String krb5File = "/xxx/krb5.conf";
        String tempConfPath = fetchToTemp(krb5File, "krb5.conf文件");
        System.setProperty("java.security.krb5.conf", tempConfPath);

        // FIX: the original pointed the keytab URL at "/xxx/krb5.conf" again
        // (copy/paste slip); it must reference the service keytab.
        String keytabFile = "/xxx/hive.service.keytab";
        String tempKeytabPath = fetchToTemp(keytabFile, "keytab文件");
        System.setProperty("keytab.path", tempKeytabPath);

        // Load the cluster configuration from classpath resources.
        // FIX: the original referenced HdfsKerberosExample.class, which does not
        // exist in this file; the enclosing class is HdfsKerberosDemo.
        Configuration configuration = new Configuration();
        configuration.addResource(new Path(HdfsKerberosDemo.class.getClassLoader().getResource("core-site.xml").getPath()));
        configuration.addResource(new Path(HdfsKerberosDemo.class.getClassLoader().getResource("hdfs-site.xml").getPath()));

        // Sanity-check the Kerberos-related system properties.
        String krb5conf = System.getProperty("java.security.krb5.conf");
        String keytabPath = System.getProperty("keytab.path");
        if (krb5conf == null) {
            System.out.println("未找到krb5.conf,請(qǐng)配置VMOptions[java.security.krb5.conf]");
            return;
        } else if (keytabPath == null) {
            // FIX: the message used to say "krb5.conf"; this branch is about the keytab.
            System.out.println("未找到keytab,請(qǐng)配置VMOptions[keytab.path]");
            return;
        }
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        configuration.set("fs.defaultFS", "hdfs://xxxx"); // HDFS address
        // Pin the FileSystem implementations explicitly — relying on ServiceLoader
        // discovery can fail under some classloader setups and then surfaces as
        // "Client cannot authenticate via:[TOKEN, KERBEROS]" (see notes below).
        configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        // Enable automatic keytab re-login so long-running clients keep a valid TGT.
        configuration.set("hadoop.kerberos.keytab.login.autorenewal.enabled", "true");
        configuration.set("hadoop.security.authentication", "Kerberos");

        // Authenticate via UserGroupInformation.
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab("hive/datasophon01@HADOOP.COM", keytabPath);
        System.out.println("====== 打印當(dāng)前登錄用戶 START =====");
        System.out.println("====== 打印當(dāng)前登錄用戶 END =====\n");

        // Access HDFS. FIX: reuse the single FileSystem instance (the original
        // fetched a second one for listFiles) and close it deterministically.
        Path rootPath = new Path("/data");
        try (FileSystem fileSystem = FileSystem.get(configuration)) {
            System.out.println("====== ACL =====");
            System.out.println(fileSystem.getStatus(rootPath));
            System.out.println("======= ROOT(/) Files ======");
            RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(rootPath, true);
            while (files.hasNext()) {
                System.out.println("文件路徑為" + files.next().getPath());
            }
        }
        System.out.println("hdfs文件內(nèi)容寫(xiě)入成功");

        // Drop the Kerberos credentials acquired from the keytab.
        UserGroupInformation.getLoginUser().logoutUserFromKeytab();
    }

    /**
     * Downloads {@code url} into {@link #TEMP_DIR}, keeping the remote file name.
     * On failure any stale or partially written local copy is removed and a
     * RuntimeException carrying the original cause is thrown.
     *
     * @param url   source URL of the artifact
     * @param label human-readable artifact name used in the failure message
     * @return absolute local path of the downloaded file
     */
    private static String fetchToTemp(String url, String label) {
        String fileName = url.substring(url.lastIndexOf('/') + 1);
        String localPath = TEMP_DIR + fileName;
        try {
            download(url, localPath);
        } catch (IOException e) {
            // Best-effort cleanup so a retry starts from a clean slate; replaces
            // the original's manual directory-listing existence check.
            try {
                Files.deleteIfExists(Paths.get(localPath));
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
            // FIX: preserve the cause instead of discarding it.
            throw new RuntimeException("獲取" + label + "失敗", e);
        }
        return localPath;
    }

    /**
     * Streams the resource at {@code url} to {@code localPath}, overwriting any
     * existing file.
     *
     * @param url       source URL
     * @param localPath destination file path
     * @throws IOException if the URL cannot be opened or the copy fails
     */
    private static void download(String url, String localPath) throws IOException {
        URL website = new URL(url);
        try (InputStream in = website.openStream()) {
            Files.copy(in, Paths.get(localPath), StandardCopyOption.REPLACE_EXISTING);
        }
    }
}
win11上執(zhí)行上述代碼正常,但在linux服務(wù)器上執(zhí)行同樣的代碼,用FileSystem Client去操作HDFS文件的時(shí)候,應(yīng)用報(bào)如下的錯(cuò)誤:org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]。
在確認(rèn)了集群已經(jīng)是開(kāi)啟Kerberos認(rèn)證之后哄芜,去看詳細(xì)的日志相關(guān)信息,看到了如下的提示:
Login successful for user hdfs/xxxx@xxx.COM using keytab file /root/temp/xxxKEYTABFILE
說(shuō)明應(yīng)用端的Kerberos認(rèn)證其實(shí)已經(jīng)通過(guò)了柬唯,但是在操作HDFS文件的時(shí)候?yàn)槭裁催€是報(bào)了Client cannot authenticate via:[TOKEN, KERBEROS]的錯(cuò)认臊。
解決:換成如下代碼
// NOTE(review): illustrative fragment of the fixed main() body; it is NOT a
// standalone compilation unit — "linkParameter" and "taskDatasourceConfigDTO"
// come from the surrounding application and are not defined here.
String krb5File = "/xxx/krb5.conf";
String fileName = krb5File.substring(krb5File.lastIndexOf("/")+1);
String tempConfPath = "D:\\Download\\" + fileName;
try {
download(krb5File, tempConfPath);
} catch (IOException e) {
// Download failed: if a stale local copy exists, remove it before bailing out.
File folder = new File("D:\\Download");
String[] files = folder.list();
boolean fileExists = false;
if (files != null) {
for (String file : files) {
if (file.equals(fileName)) {
fileExists = true;
break;
}
}
}
if (fileExists) {
//log.info("------------------- krb5.conf file exists -------------");
try {
Files.delete(Paths.get(tempConfPath));
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
throw new RuntimeException("獲取krb5.conf文件失敗");
}
/** Point the JVM's Kerberos machinery at the downloaded krb5.conf. */
System.setProperty("java.security.krb5.conf", tempConfPath);
// NOTE(review): looks like a copy/paste slip — the keytab URL points at
// krb5.conf again; presumably it should reference the .keytab file. Verify.
String keytabFile = "/xxx/krb5.conf";
String keytabName = keytabFile.substring(keytabFile.lastIndexOf("/")+1);
String tempKeytabPath = "D:\\Download\\" + keytabName;
try {
download(keytabFile, tempKeytabPath);
} catch (IOException e) {
// Same stale-copy cleanup as above, for the keytab.
File folder = new File("D:\\Download");
String[] files = folder.list();
boolean fileExists = false;
if (files != null) {
for (String file : files) {
if (file.equals(keytabName)) {
fileExists = true;
break;
}
}
}
if (fileExists) {
//log.info("------------------- keytab file exists -------------");
try {
Files.delete(Paths.get(tempKeytabPath));
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
// NOTE(review): message says krb5.conf but this branch handles the keytab.
throw new RuntimeException("獲取krb5.conf文件失敗");
}
System.setProperty("keytab.path", tempKeytabPath);
// Load the Hadoop configuration from classpath resources.
Configuration configuration = new Configuration();
configuration.addResource(new Path(HdfsKerberosExample.class.getClassLoader().getResource("core-site.xml").getPath()));
configuration.addResource(new Path(HdfsKerberosExample.class.getClassLoader().getResource("hdfs-site.xml").getPath()));
// Verify the Kerberos-related system properties are in place.
String krb5conf = System.getProperty("java.security.krb5.conf");
String keytabPath = System.getProperty("keytab.path");
if (krb5conf == null) {
System.out.println("未找到krb5.conf,請(qǐng)配置VMOptions[java.security.krb5.conf]");
return;
} else if (keytabPath == null) {
System.out.println("未找到krb5.conf,請(qǐng)配置VMOptions[keytab.path]");
return;
}
System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
configuration.set("fs.defaultFS", "hdfs://" + linkParameter.getHdfsAddress()); // HDFS address
configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
//configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
// The key fix: resolve Hadoop classes through the classloader that actually
// contains DistributedFileSystem — avoids "Client cannot authenticate via:
// [TOKEN, KERBEROS]" under some deployment classloaders.
Thread.currentThread().setContextClassLoader(org.apache.hadoop.hdfs.DistributedFileSystem.class.getClassLoader());
configuration.setClassLoader(org.apache.hadoop.hdfs.DistributedFileSystem.class.getClassLoader());
//configuration.setClassLoader(org.apache.hadoop.hdfs.DistributedFileSystem.class.getClassLoader());
// Enable keytab renewal.
configuration.set("hadoop.kerberos.keytab.login.autorenewal.enabled", "true");
configuration.set("hadoop.security.authentication", taskDatasourceConfigDTO.getAuthentication());
// Authenticate with UserGroupInformation.
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab("hive/datasophon01@HADOOP.COM", keytabPath);
System.out.println("====== 打印當(dāng)前登錄用戶 START =====");
//System.out.println("user:" + UserGroupInformation.getCurrentUser());
System.out.println("====== 打印當(dāng)前登錄用戶 END =====\n");
// Create the FileSystem instance.
FileSystem fileSystem = FileSystem.get(configuration);
// Root path to inspect.
Path rootPath = new Path("/data");
System.out.println("====== ACL =====");
// Print the ACL content (disabled).
/*AclStatus aclStatus = fileSystem.getAclStatus(rootPath);
System.out.println(aclStatus);
System.out.println();*/
System.out.println(fileSystem.getStatus(new Path("/data")));
System.out.println("======= ROOT(/) Files ======");
RemoteIterator<LocatedFileStatus> list = FileSystem.get(configuration).listFiles(rootPath,true);
while (list.hasNext()){
LocatedFileStatus fileStatus =list.next();
System.out.println("文件路徑為" + fileStatus.getPath());
}
/*RemoteIterator<LocatedFileStatus> fileStatus = fileSystem.listFiles(rootPath,true);
System.out.println("Successfully obtained the file system; exporting its contents, please wait...");
FileWriter writer = new FileWriter("hdfs-files.txt");
while (fileStatus.hasNext()){
LocatedFileStatus st = fileStatus.next();
writer.write(st.getPath().toString());
}
writer.close();*/
System.out.println("hdfs文件內(nèi)容寫(xiě)入成功");
// Log out the authenticated user.
// UserGroupInformation.getLoginUser().logout();
UserGroupInformation.getLoginUser().logoutUserFromKeytab();
核心是如下三行
// The three lines that actually fix "Client cannot authenticate via:[TOKEN, KERBEROS]":
// pin the FileSystem implementations and make both the thread and the Hadoop
// Configuration resolve classes via the loader that contains DistributedFileSystem.
configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());// configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
Thread.currentThread().setContextClassLoader(org.apache.hadoop.hdfs.DistributedFileSystem.class.getClassLoader());
configuration.setClassLoader(org.apache.hadoop.hdfs.DistributedFileSystem.class.getClassLoader());