MapReduce program in Java Hadoop that implements a simple "People You Might Know"

The input file contains the adjacency list and has multiple lines in it, of the following format:
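A hypothetical example line might look like this (user ID first, then that user's friend IDs; the real separators have to match whatever pattern is passed to split(...) in the mapper below):

    0 <tab> 1,2,3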

import java.io.IOException; 
import org.apache.hadoop.conf.Configuration; 
import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.io.IntWritable; 
import org.apache.hadoop.io.LongWritable; 
import org.apache.hadoop.io.Text; 
import org.apache.hadoop.mapreduce.Job; 
import org.apache.hadoop.mapreduce.Mapper; 
import org.apache.hadoop.mapreduce.Reducer; 
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Friends 
{

public class FriendsMap extends Mapper < LongWritable, Text, Text, IntWritable >
    {
    private Text friendsAB;
    private Text friendsBA;
    private IntWritable one = new IntWritable(1);
    private IntWritable oneLess = new IntWritable(-999999999);
        //@SuppressWarnings("null")
        @Override 
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
        { 
            String friendsOfA[] = null;     //This will be all of the friends of the user in this row
            String oneRow[] = value.toString().split("\t,");    //Break the row up into user IDs
            String userA = oneRow[0];       //This is the main user for this row
            for (int i=1; i < oneRow.length; i++)   //Create an array of the rest of the users in this row
            {
                friendsOfA[i-1] = oneRow[i];
            }
            for (int i=0; i < oneRow.length; i++)   //Output the main user in pairs with all friends plus a large negative #
            {
                friendsAB.set(userA + " " + friendsOfA[i]);
                context.write(friendsAB, oneLess);
                System.out.println(friendsAB + " " + oneLess);
            }
            for (int i = 0; i < friendsOfA.length; i++)     //Output each friend pair plus the number 1
            {
                for (int j = i + 1; j < friendsOfA.length; j++) 
                {
                    friendsAB.set(friendsOfA[i] + " " + friendsOfA[j]);
                    friendsBA.set(friendsOfA[j] + " " + friendsOfA[i]);
                    context.write(friendsAB, one);
                    context.write(friendsBA, one);
                    System.out.println(friendsAB + " " + one);
                    System.out.println(friendsBA + " " + one);
                }
            }
        }
    }

class FriendReducer extends Reducer < Text, IntWritable, Text, IntWritable > 
    { 
        private IntWritable result = new IntWritable(); 
        @Override 
        public void reduce( Text key, Iterable < IntWritable > values, Context context) throws IOException, InterruptedException 
        { 
            int sum = 0; 
            for (IntWritable val : values) 
            { 
                sum += val.get(); 
            } 
            if (sum > 1)
            {
                result.set( sum); 
                context.write( key, result);
            }
            //At this point I have all pairs of users with recommended friends and a count of how many times each
            //friend has been recommended to a user.
            //I need to sort by user and then by number of recommendations.
            //Then print the user <tab> all recommendations with commas between them.
            //(A rough sketch of that second pass is at the end of this post.)
        } 
    }


public static void main( String[] args) throws Exception 
    { 
        Configuration conf = new Configuration();
        Job job = Job.getInstance( conf, "Friends");
        job.setJarByClass(Friends.class);
        FileInputFormat.addInputPath( job, new Path("input")); 
        FileOutputFormat.setOutputPath( job, new Path("output")); 
        job.setMapperClass( FriendsMap.class); 
        job.setCombinerClass( FriendReducer.class); 
        job.setReducerClass( FriendReducer.class);
        job.setOutputKeyClass( Text.class); 
        job.setOutputValueClass( IntWritable.class);

        System.exit( job.waitForCompletion( true) ? 0 : 1); 
    }
}
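
To make the intended logic concrete: for one hypothetical input line that gets split into 1 2 3 4 (user 1 with friends 2, 3 and 4), the mapper above is meant to emit the pairs below. (Note that String.split() takes a regular expression, so "\t," matches a literal tab followed by a comma, not "tab or comma".)

    1 2    -999999999    (existing friendship, must never be recommended)
    1 3    -999999999
    1 4    -999999999
    2 3    1             (2 and 3 have the common friend 1)
    3 2    1
    2 4    1
    4 2    1
    3 4    1
    4 3    1

The reducer then sums the values for each pair, so any pair that is already a friendship ends up with a large negative total and is dropped by the sum > 1 check, while the remaining totals count common friends.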
This is the error I am getting in the console:

17/11/15 16:05:51 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17/11/15 16:06:54 INFO Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
17/11/15 16:06:54 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
17/11/15 16:06:54 WARN mapred.JobClient: Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
17/11/15 16:06:55 WARN mapred.JobClient: No job jar file set. User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
17/11/15 16:06:55 INFO input.FileInputFormat: Total input paths to process : 2
17/11/15 16:07:05 INFO mapred.JobClient: Running job: job_local426825952_0001
17/11/15 16:07:05 INFO mapred.LocalJobRunner: OutputCommitter set in config null
17/11/15 16:07:05 INFO mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
17/11/15 16:07:05 INFO mapred.LocalJobRunner: Waiting for map tasks
17/11/15 16:07:05 INFO mapred.LocalJobRunner: Starting task: attempt_local426825952_0001_m_000000_0
17/11/15 16:07:05 WARN mapreduce.Counters: Group org.apache.hadoop.mapred.Task$Counter is deprecated. Use org.apache.hadoop.mapreduce.TaskCounter instead
17/11/15 16:07:05 INFO util.ProcessTree: setsid exited with exit code 0
17/11/15 16:07:05 INFO mapred.Task:  Using ResourceCalculatorPlugin : org.apache.hadoop.util.LinuxResourceCalculatorPlugin@670217f0
17/11/15 16:07:05 INFO mapred.LocalJobRunner: Starting task: attempt_local426825952_0001_m_000001_0
17/11/15 16:07:05 WARN mapreduce.Counters: Group org.apache.hadoop.mapred.Task$Co

public static class FriendsMap extends Mapper < LongWritable, Text, Text, IntWritable >
    {
        //@SuppressWarnings("null")
        @Override 
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
        { 

            String friendsAB;       //This will be used to create pairs of users
            String friendsBA;       //This will be used to create pairs of users
            Text pairA;
            Text pairB; 
            IntWritable one     = new IntWritable(1);           //1 if they are not an existing pair here
            IntWritable oneLess = new IntWritable(-999999999);  //Large negative value if they are an existing pair

            String oneRow[] = value.toString().split("\t,");     //Break the row up into user IDs
            Text userA = new Text(oneRow[0]);                    //This is the main user for this row
            String friendsOfA[] = new String[oneRow.length - 1]; //All of the friends of the main user in this row
            for (int i = 1; i < oneRow.length; i++)              //Copy the rest of the users in this row
            {
                friendsOfA[i-1] = oneRow[i];
            }
            for (int i = 0; i < friendsOfA.length; i++) //Output the main user paired with each friend plus a large negative #
            {                                       //We do not want to recommend them as friends because they are friends 
                Text FOA = new Text (friendsOfA[i]);
                friendsAB = (userA + " " + FOA);
                Text pair = new Text (friendsAB);
                context.write(pair, oneLess);
                System.out.println(pair + " " + oneLess);
            }
            for (int i = 0; i < friendsOfA.length; i++)     //Output each friend pair plus the number 1
            {                                               //We want to recommend them as potential friends
                for (int j = i + 1; j < friendsOfA.length; j++) 
                {
                    Text FOA = new Text (friendsOfA[i]);
                    Text FOB = new Text (friendsOfA[j]);
                    friendsAB = (FOA + " " + FOB);
                    friendsBA = (FOB + " " + FOA);
                    pairA = new Text (friendsAB);
                    pairB = new Text (friendsBA);
                    context.write(pairA, one);
                    context.write(pairB, one);
                    System.out.println(pairA + " " + one);
                    System.out.println(pairB + " " + one);
                }
            }
        }
    }
public class Friends {

    public static class FriendsMap extends Mapper <...> {}

    public static class FriendReducer extends Reducer <...> {}

    public static void main( String[] args) throws Exception { 
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Friends");
        job.setJarByClass(Friends.class);
        FileInputFormat.addInputPath(job, new Path("input")); 
        FileOutputFormat.setOutputPath(job, new Path("output")); 
        job.setMapperClass(FriendsMap.class); 
        job.setCombinerClass(FriendReducer.class); 
        job.setReducerClass(FriendReducer.class);
        job.setOutputKeyClass(Text.class); 
        job.setOutputValueClass(IntWritable.class);

        System.exit( job.waitForCompletion( true) ? 0 : 1); 
    }
}
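
The sorting and formatting step described in the reducer comment could be handled by a second MapReduce job that reads the first job's text output. Below is a rough sketch under that assumption; the class names RecommendationMap and RecommendationReduce are hypothetical (not part of the code above), and it assumes the default TextOutputFormat, so each input line looks like "userA userB<tab>count". It also needs java.util.ArrayList and java.util.List on top of the imports already shown.

    public static class RecommendationMap extends Mapper < LongWritable, Text, Text, Text >
    {
        @Override 
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
        { 
            String line[] = value.toString().split("\t");   //"userA userB" and the count
            String pair[] = line[0].split(" ");              //Split the pair into the two user IDs
            //Key on the user receiving the recommendation, carry the recommended user and the count
            context.write(new Text(pair[0]), new Text(pair[1] + ":" + line[1]));
        }
    }

    public static class RecommendationReduce extends Reducer < Text, Text, Text, Text >
    {
        @Override 
        public void reduce(Text key, Iterable < Text > values, Context context) throws IOException, InterruptedException 
        { 
            List<String[]> recs = new ArrayList<String[]>(); //Each entry is {recommendedUser, count}
            for (Text val : values) 
            { 
                recs.add(val.toString().split(":"));
            } 
            //Sort by count, highest number of common friends first
            recs.sort((a, b) -> Integer.compare(Integer.parseInt(b[1]), Integer.parseInt(a[1])));
            StringBuilder out = new StringBuilder();
            for (String[] rec : recs)
            {
                if (out.length() > 0)
                {
                    out.append(",");
                }
                out.append(rec[0]);
            }
            context.write(key, new Text(out.toString()));    //user <tab> rec1,rec2,...
        } 
    }

In main this would be wired up as a second Job whose FileInputFormat.addInputPath points at the output directory of the first job, with Text set as both the output key and value class.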