Java org.apache.hadoop.security.AccessControlException:客户端无法通过:[令牌,KERBEROS]进行身份验证的问题
我正在使用 Java 客户端通过 Kerberos 身份验证访问启用了安全机制的 HDFS。我在服务器上执行 klist,显示已存在一张有效票据,但客户端仍然收到异常:客户端无法通过:[令牌,KERBEROS]进行身份验证。非常感谢您的帮助。环境:Java 8、CDH 5.12.1、Hadoop 2.6。下面依次是错误日志、Java 代码和 Maven 依赖配置。(标签:java, hadoop, hdfs, kerberos, cloudera)
// Connects to a Kerberos-secured HDFS cluster using a keytab login and lists "/".
// Paths to the cluster-side Hadoop client configuration files.
final String CONF_CORE_SITE = "/etc/hadoop/conf/core-site.xml";
final String CONF_HDFS_SITE = "/etc/hadoop/conf/hdfs-site.xml";
Configuration configuration = new Configuration();
configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
// Must match the cluster's hadoop.rpc.protection value exactly, or SASL negotiation fails.
configuration.set("hadoop.rpc.protection", "privacy");
File hadoopCoreConfig = new File(CONF_CORE_SITE);
File hadoopHdfsConfig = new File(CONF_HDFS_SITE);
if (!hadoopCoreConfig.exists() || !hadoopHdfsConfig.exists()) {
throw new FileNotFoundException("Files core-site.xml or hdfs-site.xml are not found. Check /etc/hadoop/conf/ path.");
}
configuration.addResource(new Path(hadoopCoreConfig.toURI()));
configuration.addResource(new Path(hadoopHdfsConfig.toURI()));
// Enable Kerberos explicitly. If the loaded site files do not set this, UGI falls
// back to SIMPLE auth and the NameNode rejects the connection with
// "client cannot authenticate via:[TOKEN, KERBEROS]".
configuration.set("hadoop.security.authentication", "kerberos");
UserGroupInformation.setConfiguration(configuration);
// Log in from the keytab. Do NOT also call loginUserFromSubject(null) afterwards:
// that replaces the keytab login with the (typically empty) JVM subject and
// discards the Kerberos credentials just obtained.
UserGroupInformation.loginUserFromKeytab("dwh_udp@abc.com", "/home/dwh_udp/dwh_udp.keytab");
URI uri = URI.create("hdfs://****");
// Use the two-argument overload. FileSystem.get(uri, conf, "User") builds a
// remote-user UGI for "User" that does NOT carry the keytab credentials, which
// is exactly what triggers the AccessControlException being debugged here.
FileSystem fs = FileSystem.get(uri, configuration);
FileStatus[] fsStatus = fs.listStatus(new Path("/"));
// Enhanced for-loop: the original "i <= fsStatus.length" bound overran the array
// and threw ArrayIndexOutOfBoundsException on the final iteration.
for (FileStatus status : fsStatus) {
System.out.println(status.getPath().toString());
}
final String CONF_CORE_SITE = "/etc/hadoop/conf/core-site.xml";
final String CONF_HDFS_SITE = "/etc/hadoop/conf/hdfs-site.xml";
Configuration configuration = new Configuration();
configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
configuration.set("hadoop.rpc.protection", "privacy");
File hadoopCoreConfig = new File(CONF_CORE_SITE);
File hadoopHdfsConfig = new File(CONF_HDFS_SITE);
if (! hadoopCoreConfig.exists() || ! hadoopHdfsConfig.exists()) {
throw new FileNotFoundException("Files core-site.xml or hdfs-site.xml are not found. Check /etc/hadoop/conf/ path.");
}
configuration.addResource(new Path(hadoopCoreConfig.toURI()));
configuration.addResource(new Path(hadoopHdfsConfig.toURI()));
//使用 $ kinit 创建的现有安全上下文
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab("dwh_udp@abc.com", "/home/dwh_udp/dwh_udp.keytab");
UserGroupInformation.loginUserFromSubject(null);
URI uri = URI.create("hdfs://****");
FileSystem fs = FileSystem.get(uri,configuration,"User");
FileStatus[] fsStatus = fs.listStatus(new Path("/"));
for(int i = 0; i <= fsStatus.length; i++){
System.out.println(fsStatus[i].getPath().toString());
}
// Connects to a Kerberos-secured HDFS cluster using a keytab login and lists "/".
// Paths to the cluster-side Hadoop client configuration files.
final String CONF_CORE_SITE = "/etc/hadoop/conf/core-site.xml";
final String CONF_HDFS_SITE = "/etc/hadoop/conf/hdfs-site.xml";
Configuration configuration = new Configuration();
configuration.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
configuration.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
// Must match the cluster's hadoop.rpc.protection value exactly, or SASL negotiation fails.
configuration.set("hadoop.rpc.protection", "privacy");
File hadoopCoreConfig = new File(CONF_CORE_SITE);
File hadoopHdfsConfig = new File(CONF_HDFS_SITE);
if (!hadoopCoreConfig.exists() || !hadoopHdfsConfig.exists()) {
throw new FileNotFoundException("Files core-site.xml or hdfs-site.xml are not found. Check /etc/hadoop/conf/ path.");
}
configuration.addResource(new Path(hadoopCoreConfig.toURI()));
configuration.addResource(new Path(hadoopHdfsConfig.toURI()));
// Enable Kerberos explicitly. If the loaded site files do not set this, UGI falls
// back to SIMPLE auth and the NameNode rejects the connection with
// "client cannot authenticate via:[TOKEN, KERBEROS]".
configuration.set("hadoop.security.authentication", "kerberos");
UserGroupInformation.setConfiguration(configuration);
// Log in from the keytab. Do NOT also call loginUserFromSubject(null) afterwards:
// that replaces the keytab login with the (typically empty) JVM subject and
// discards the Kerberos credentials just obtained.
UserGroupInformation.loginUserFromKeytab("dwh_udp@abc.com", "/home/dwh_udp/dwh_udp.keytab");
URI uri = URI.create("hdfs://****");
// Use the two-argument overload. FileSystem.get(uri, conf, "User") builds a
// remote-user UGI for "User" that does NOT carry the keytab credentials, which
// is exactly what triggers the AccessControlException being debugged here.
FileSystem fs = FileSystem.get(uri, configuration);
FileStatus[] fsStatus = fs.listStatus(new Path("/"));
// Enhanced for-loop: the original "i <= fsStatus.length" bound overran the array
// and threw ArrayIndexOutOfBoundsException on the final iteration.
for (FileStatus status : fsStatus) {
System.out.println(status.getPath().toString());
}
<!-- Pin the Hadoop client artifacts to the exact CDH build running on the
     cluster (CDH 5.12.1 ships Hadoop 2.6.0); a client/server version mismatch
     can itself cause RPC/authentication failures. -->
<properties>
<hadoop.version>2.6.0</hadoop.version>
<hadoop.release>cdh5.12.1</hadoop.release>
</properties>
<!-- NOTE(review): hadoop-core is the legacy MR1 artifact; presumably included
     for CDH5 MR1 compatibility - confirm it is actually needed alongside
     hadoop-common, since pulling both can shade duplicate classes. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>${hadoop.version}-mr1-${hadoop.release}</version>
</dependency>
<!-- HDFS client (provides org.apache.hadoop.hdfs.DistributedFileSystem). -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}-${hadoop.release}</version>
</dependency>
<!-- Core Hadoop APIs used by the snippet above: Configuration, FileSystem,
     Path, UserGroupInformation. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}-${hadoop.release}</version>
</dependency>
</dependencies>