Java 无法找到或加载主类 org.apache.hadoop.mapreduce.v2.app.MRAppMaster。mapred-site.xml 的内容: mapreduce.framework.name = yarn; yarn.app.mapreduce.am.env = HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0; mapreduce.map.env = HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0; mapreduce.reduce.env = HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0; mapreduce.application.classpath = $HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*

Java 无法找到或加载主类org.apache.hadoop.mapreduce.v2.app.MRAppMaster mapred-site.xml的内容: mapreduce.framework.name 纱线 warn.app.mapreduce.am.env HADOOP\u MAPRED\u HOME=/HOME/admin/HADOOP-3.1.0 mapreduce.map.env HADOOP\u MAPRED\u HOME=/HOME/admin/HADOOP-3.1.0 mapreduce.reduce.env HADOOP\u MAPRED\u HOME=/HOME/admin/HADOOP-3.1.0 mapreduce.application.classpath $HADOOP\u MAPRED\u HOME/share/HADOOP/mapreduce/*,$HADOOP\u MAPRED\u HOME/share/HADOOP/mapreduce/lib/*,java,hadoop,hadoop3,Java,Hadoop,Hadoop3,虽然我已经设置了warn.app.mapreduce.am.env和其他参数;我无法找到或加载主类org.apache.hadoop.mapreduce.v2.app.MRAppMaster错误。 我试图在linux机器上安装hadoop的地方远程运行map reduce程序,并在windows机器上运行它。 以下是我的作业配置设置 public class WordCount { public static void main(String[] args) throws IOE

虽然我已经设置了warn.app.mapreduce.am.env和其他参数;我无法找到或加载主类org.apache.hadoop.mapreduce.v2.app.MRAppMaster错误。 我试图在linux机器上安装hadoop的地方远程运行map reduce程序,并在windows机器上运行它。 以下是我的作业配置设置

public class WordCount {
  public static void main(String[] args)
      throws IOException, ClassNotFoundException, InterruptedException {
    //
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("admin");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

      public Void run() throws Exception {
        try {
          Configuration configuration = new Configuration();

          configuration.set("yarn.resourcemanager.address", "192.168.33.75:50001"); // see step 3
          configuration.set("mapreduce.framework.name", "yarn");
          // configuration.set("yarn.app.mapreduce.am.env",
          // "HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0");
          // configuration.set("mapreduce.map.env", "HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0");
          // configuration.set("mapreduce.reduce.env",
          // "HADOOP_MAPRED_HOME=/home/admin/hadoop-3.1.0");
          configuration.set("fs.defaultFS", "hdfs://192.168.33.75:54310"); // see step 2
          configuration.set("mapreduce.app-submission.cross-platform", "true");
          configuration.set("mapred.remote.os", "Linux");
          configuration.set("yarn.application.classpath",
              "$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/*:$HADOOP_COMMON_HOME/lib/*:"
                  + "$HADOOP_HDFS_HOME/*:$HADOOP_HDFS_HOME/lib/*:"
                  + "$HADOOP_YARN_HOME/*:$HADOOP_YARN_HOME/lib/*:"
                  + "$HADOOP_MAPRED_HOME/*:$HADOOP_MAPRED_HOME/lib/*");

          Job job = Job.getInstance(configuration);

          job.setJarByClass(WordCount.class); // use this when uploaded the Jar to the server and
                                              // running the job directly and locally on the server
          job.setOutputKeyClass(Text.class);
          job.setOutputValueClass(IntWritable.class);
          job.setMapperClass(MapForWordCount.class);
          job.setReducerClass(ReduceForWordCount.class);

          Path input = new Path("/user/admin/wordCountInput.txt");
          Path output = new Path("/user/admin/output");
          FileInputFormat.addInputPath(job, input);
          FileOutputFormat.setOutputPath(job, output);
          System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
          e.printStackTrace();
        }
        return null;
      }

    });


  }
公共类字数{
公共静态void main(字符串[]args)
抛出IOException、ClassNotFoundException、InterruptedException{
//
UserGroupInformation ugi=UserGroupInformation.createRemoteUser(“admin”);
ugi.doAs(新特权接受行动){
public Void run()引发异常{
试一试{
配置=新配置();
configuration.set(“warn.resourcemanager.address”,“192.168.33.75:50001”);//参见步骤3
set(“mapreduce.framework.name”、“纱线”);
//configuration.set(“warn.app.mapreduce.am.env”,
//“HADOOP_MAPRED_HOME=/HOME/admin/HADOOP-3.1.0”);
//set(“mapreduce.map.env”、“HADOOP\u MAPRED\u HOME=/HOME/admin/HADOOP-3.1.0”);
//configuration.set(“mapreduce.reduce.env”,
//“HADOOP_MAPRED_HOME=/HOME/admin/HADOOP-3.1.0”);
configuration.set(“fs.defaultFS”hdfs://192.168.33.75:54310“”;//参见步骤2
set(“mapreduce.app submission.cross-platform”、“true”);
set(“mapred.remote.os”、“Linux”);
configuration.set(“warn.application.classpath”,
“$HADOOP_CONF_DIR:$HADOOP_COMMON_HOME/*:$HADOOP_COMMON_HOME/lib/*:”
+“$HADOOP\u HDFS\u HOME/*:$HADOOP\u HDFS\u HOME/lib/*:”
+“$HADOOP\u Thread\u HOME/*:$HADOOP\u Thread\u HOME/lib/*:”
+“$HADOOP\u MAPRED\u HOME/*:$HADOOP\u MAPRED\u HOME/lib/*”;
Job Job=Job.getInstance(配置);
job.setJarByClass(WordCount.class);//将Jar上载到服务器并
//直接在服务器上本地运行作业
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
setMapperClass(MapForWordCount.class);
job.setReducerClass(reduceForDorCount.class);
路径输入=新路径(“/user/admin/wordCountInput.txt”);
路径输出=新路径(“/user/admin/output”);
addInputPath(作业,输入);
setOutputPath(作业,输出);
系统退出(作业等待完成(真)?0:1;
}捕获(例外e){
e、 printStackTrace();
}
返回null;
}
});
}
请帮帮我。我在过去6天一直在解决这个问题。提前谢谢。
Hadoop版本:3.1.0

我也遇到了同样的问题,通过在 mapred-site.xml 中添加以下配置解决了(即编辑 mapreduce.application.classpath 属性)


<property>
  <name>mapreduce.application.classpath</name>
  <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/common/*,$HADOOP_MAPRED_HOME/share/hadoop/common/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/yarn/*,$HADOOP_MAPRED_HOME/share/hadoop/yarn/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/hdfs/*,$HADOOP_MAPRED_HOME/share/hadoop/hdfs/lib/*</value>
</property>

我在 yarn-site.xml 中添加了以下属性

<property>
  <name>yarn.application.classpath</name>
  <value> $HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,
    $HADOOP_YARN_HOME/share/hadoop/yarn/*,$HADOOP_YARN_HOME/share/hadoop/yarn/lib/* 
  </value>
</property>

我的程序现在运行顺利。请随时询问详细信息。

问题是您的资源管理器(YARN)无法加载 Hadoop 库(JAR)。我通过更新配置解决了这个问题。将此添加到 yarn-site.xml:


<property>
  <name>yarn.application.classpath</name>
  <value>C:/hadoop-2.8.0/share/hadoop/mapreduce/*,C:/hadoop-2.8.0/share/hadoop/mapreduce/lib/*,C:/hadoop-2.8.0/share/hadoop/common/*,C:/hadoop-2.8.0/share/hadoop/common/lib/*,
    C:/hadoop-2.8.0/share/hadoop/hdfs/*,C:/hadoop-2.8.0/share/hadoop/hdfs/lib/*,C:/hadoop-2.8.0/share/hadoop/yarn/*,C:/hadoop-2.8.0/share/hadoop/yarn/lib/*</value>
</property>

请注意,此处使用的路径可以是相对的,具体取决于您的系统。

只需编辑 mapred-site.xml 文件:

添加以下属性:

  • <property>
      <name>mapreduce.framework.name</name>
      <value>yarn</value>
    </property>

  • <property>
      <name>yarn.app.mapreduce.am.env</name>
      <value>HADOOP_MAPRED_HOME=/Users/adityaatri/Applications/hadoop-3.1.3</value>
    </property>

  • <property>
      <name>mapreduce.map.env</name>
      <value>HADOOP_MAPRED_HOME=/Users/adityaatri/Applications/hadoop-3.1.3</value>
    </property>

  • <property>
      <name>mapreduce.reduce.env</name>
      <value>HADOOP_MAPRED_HOME=/Users/adityaatri/Applications/hadoop-3.1.3</value>
    </property>

  • 现在,对于上述4个属性,将路径
    /Users/adityaatri/Applications/hadoop-3.1.3
    替换为您的hadoop家庭地址

    现在添加第5个属性:

  • 
    mapreduce.application.classpath
    
  • 在终端中执行以下命令后,必须用内容填充
    元素:

  • export HADOOP_CLASSPATH=$(hadoop classpath)

  • echo $HADOOP_CLASSPATH

  • 我的终端的输出:

    /Users/adityaatri/Applications/hadoop-3.1.3/etc/hadoop:/Users/adityaatri/Applications/hadoop-3.1.3/share/hadoop/common/lib/*:/Users/adityaatri/Applications/hadoop-3.1.3/share/hadoop/common/*:/Users/adityaatri Applications/hadoop/hadoop/hdfs:/Users/adityaatri/Applications/hadoop-3.1.1。
    
    <property> 
        <name>mapreduce.application.classpath</name>
        <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/common/*,$HADOOP_MAPRED_HOME/share/hadoop/common/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/yarn/*,$HADOOP_MAPRED_HOME/share/hadoop/yarn/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/hdfs/*,$HADOOP_MAPRED_HOME/share/hadoop/hdfs/lib/*</value>
    </property>
    
    <property>
      <name>yarn.application.classpath</name>
      <value> $HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,
        $HADOOP_YARN_HOME/share/hadoop/yarn/*,$HADOOP_YARN_HOME/share/hadoop/yarn/lib/* 
      </value>
    </property>
    
      configuration.set("yarn.application.classpath",
                  "{{HADOOP_CONF_DIR}},{{HADOOP_COMMON_HOME}}/share/hadoop/common/*,{{HADOOP_COMMON_HOME}}/share/hadoop/common/lib/*,"
                      + " {{HADOOP_HDFS_HOME}}/share/hadoop/hdfs/*,{{HADOOP_HDFS_HOME}}/share/hadoop/hdfs/lib/*,"
                      + "{{HADOOP_MAPRED_HOME}}/share/hadoop/mapreduce/*,{{HADOOP_MAPRED_HOME}}/share/hadoop/mapreduce/lib/*,"
                      + "{{HADOOP_YARN_HOME}}/share/hadoop/yarn/*,{{HADOOP_YARN_HOME}}/share/hadoop/yarn/lib/*");
    
    <property>
    <name>yarn.application.classpath</name>
    <value>C:/hadoop-2.8.0/share/hadoop/mapreduce/*,C:/hadoop-2.8.0/share/hadoop/mapreduce/lib/*,C:/Hadoop-2.8.0/share/hadoop/common/*,C:/Hadoop-2.8.0/share/hadoop/common/lib/*,
        C:/hadoop-2.8.0/share/hadoop/hdfs/*,C:/hadoop-2.8.0/share/hadoop/hdfs/lib/*,C:/hadoop-2.8.0/share/hadoop/yarn/*,C:/hadoop-2.8.0/share/hadoop/yarn/lib/*</value>
    </property>