Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/vim/5.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
错误:作业初始化失败:java.net.UnknownHostException::27017不是位于的有效Inet地址_Java_Mongodb_Hadoop_Remote Server_Unknown Host - Fatal编程技术网

错误:作业初始化失败:java.net.UnknownHostException: &lt;my ip&gt;:27017 不是有效的 Inet 地址

错误:作业初始化失败:java.net.UnknownHostException::27017不是位于的有效Inet地址,java,mongodb,hadoop,remote-server,unknown-host,Java,Mongodb,Hadoop,Remote Server,Unknown Host,我正在编写一个小Java程序,将作业远程从Windows机器提交到Hadoop机器。输入和输出来自MongoDB。作业在服务器上运行时失败。以下是我从job tracker获得的错误: Job initialization failed: java.net.UnknownHostException: <my ip>:27017 is not a valid Inet address at org.apache.hadoop.net.NetUtils.verifyHostnames(N

我正在编写一个小型 Java 程序,从 Windows 机器远程将作业提交到 Hadoop 机器。作业的输入和输出均来自 MongoDB。作业在服务器上运行时失败。以下是我从 job tracker 获得的错误:

Job initialization failed: java.net.UnknownHostException: <my ip>:27017 is not a valid Inet address at
org.apache.hadoop.net.NetUtils.verifyHostnames(NetUtils.java:587) at 
org.apache.hadoop.mapred.JobInProgress.initTasks(JobInProgress.java:734) at 
org.apache.hadoop.mapred.JobTracker.initJob(JobTracker.java:3890) at 
org.apache.hadoop.mapred.EagerTaskInitializationListener$InitJob.run(EagerTaskInitializationListener.java:79) at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at 
java.lang.Thread.run(Thread.java:724)
谁能帮我解决这个问题?下面是我的Java代码

/**
 * Submits a MapReduce job that reads treasury-yield documents from MongoDB,
 * averages the 10-year yield per year, and writes the results back to MongoDB.
 *
 * <p>Run remotely against the cluster configured via {@code mapred.job.tracker};
 * the Mongo connector supplies splits from {@code mongo.input.uri}.
 */
public class MyFirstJob extends Configured implements Tool {

    /**
     * Configures and runs the job.
     *
     * @param args command-line arguments (unused by this job)
     * @return 0 on success, 1 on failure
     * @throws Exception if job submission or execution fails
     */
    public int run(String[] args) throws Exception {

        Configuration conf = getConf();

        conf.set("mapred.job.tracker", "<my ip>:54311");
        conf.set("mongo.input.uri", "mongodb://<my ip>:27017/hadoop_in.yield_historical_in");
        conf.set("mongo.output.uri", "mongodb://<my ip>:27017/hadoop_out.yield_historical_out");

        Job job = new Job(conf, "Word Count");

        job.setInputFormatClass(com.mongodb.hadoop.MongoInputFormat.class);
        // Map output types must match what TreasuryYieldMapper actually emits:
        // (IntWritable year, DoubleWritable yield). The previous
        // NullWritable/BSONWritable declaration caused a runtime type mismatch.
        job.setMapOutputKeyClass(org.apache.hadoop.io.IntWritable.class);
        job.setMapOutputValueClass(org.apache.hadoop.io.DoubleWritable.class);

        // specify output types — must match TreasuryYieldReducer's output
        // (IntWritable key, BSONWritable value).
        job.setOutputFormatClass(com.mongodb.hadoop.MongoOutputFormat.class);
        job.setOutputKeyClass(org.apache.hadoop.io.IntWritable.class);
        job.setOutputValueClass(com.mongodb.hadoop.io.BSONWritable.class);

        // specify a mapper
        job.setMapperClass(TreasuryYieldMapper.class);

        // specify a reducer
        job.setReducerClass(TreasuryYieldReducer.class);
        // NOTE: TreasuryYieldReducer is NOT a valid combiner here — a combiner's
        // output types must equal the map output types (IntWritable/DoubleWritable),
        // but this reducer emits BSONWritable values. Do not set it as combiner.

        job.setJarByClass(MyFirstJob.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "hduser");
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hduser");
        // Effectively-final copy so the anonymous class may capture it (pre-Java-8).
        final String[] toolArgs = args;
        int exitCode = ugi.doAs(new PrivilegedExceptionAction<Integer>() {
            @Override
            public Integer run() throws Exception {
                Configuration conf = new Configuration();
                conf.set("hadoop.job.ugi", "hduser");
                // Pass real args (not null) so GenericOptionsParser inside
                // ToolRunner does not trip over a null array.
                return ToolRunner.run(conf, new MyFirstJob(), toolArgs);
            }
        });
        // Propagate the job result as the process exit status.
        System.exit(exitCode);
    }
}

/**
 * Maps a BSON treasury-yield document to (year, 10-year yield).
 *
 * <p>Assumes the document's {@code _id} field is a {@link Date} and
 * {@code bc10Year} is numeric — TODO confirm against the input collection's
 * schema; a missing field here would throw NPE/ClassCastException.
 */
public class TreasuryYieldMapper
    extends Mapper<Object, BSONObject, IntWritable, DoubleWritable> {

    @Override
    public void map(final Object pKey,
                    final BSONObject pValue,
                    final Context pContext)
        throws IOException, InterruptedException {

        // Extract the calendar year via Calendar instead of the deprecated
        // Date.getYear() (+1900). Same result in the JVM's default time zone.
        final java.util.Calendar cal = java.util.Calendar.getInstance();
        cal.setTime((Date) pValue.get("_id"));
        final int year = cal.get(java.util.Calendar.YEAR);

        final double bid10Year = ((Number) pValue.get("bc10Year")).doubleValue();

        pContext.write(new IntWritable(year), new DoubleWritable(bid10Year));
    }

    private static final Log LOG = LogFactory.getLog(TreasuryYieldMapper.class);
}

/**
 * Reduces the per-year 10-year-treasury yields emitted by the mapper into a
 * single BSON document per year holding the count, average, and sum.
 */
public class TreasuryYieldReducer
    extends Reducer<IntWritable, DoubleWritable, IntWritable, BSONWritable> {

    private static final Log LOG = LogFactory.getLog(TreasuryYieldReducer.class);

    @Override
    public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

        // Accumulate total and number of observations for this year.
        double total = 0;
        int observations = 0;
        final Iterator<DoubleWritable> it = pValues.iterator();
        while (it.hasNext()) {
            total += it.next().get();
            observations++;
        }

        final double mean = total / observations;

        LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + mean);

        // Field insertion order ("count", "avg", "sum") is preserved in BSON.
        final BasicBSONObject result = new BasicBSONObject();
        result.put("count", observations);
        result.put("avg", mean);
        result.put("sum", total);
        pContext.write(pKey, new BSONWritable(result));
    }
}