将 JavaPairRDD<String, Map<String, List<String>>> 存储为多个文件

将 JavaPairRDD<String, Map<String, List<String>>> 存储为多个文件,java,hadoop,apache-spark,Java,Hadoop,Apache Spark,我正在使用Spark 1.6并试图解决以下问题。 我有一个 JavaPairRDD <"A", <{"A1",["a1","b1","c1"]}, {"A2",["a2","b2","c2"]}>> <"B", <{"B1",["bb1","bb2","bb3"]}>> /output/A/A1 (content of A1 should have [a1,b1,c1]) /output/A/A2 (content of A2 should have [a2,b2,c2])

我正在使用Spark 1.6并试图解决以下问题。 我有一个
javapairdd
<"A", <{"A1",["a1","b1","c1"]}, {"A2",["a2","b2","c2"]}>>
<"B", <{"B1",["bb1","bb2","bb3"]}>>
/output/A/A1 (content of A1 should have [a1,b1,c1])
/output/A/A2 (content of A2 should have [a2,b2,c2])
/output/B/B1 (content of B1 should have [bb1,bb2,bb3])
public static void main(String a[]) {
            JavaPairRDD<String, Map<String, List<String>> pair;
            pair.saveAsHadoopFile(directory + "/output", String.class, Map.class,
                            RDDMultipleTextOutputFormat.class);
        }



/**
 * Output format that writes each record to a file named after its key, and
 * writes only the value (no key) into that file.
 *
 * <p>Fix over the original: {@code generateActualValue} had no {@code return}
 * statement (a compile error) and looped over the value map calling
 * {@code generateFileNameForKeyValue} purely for its side effects — but that
 * method is a pure function, so the loop accomplished nothing. An output
 * format maps one record to exactly one file; it cannot fan a single record
 * out into several files. To produce /output/A/A1, /output/A/A2, ... flatten
 * the RDD to ("A/A1", "[a1, b1, c1]") pairs <em>before</em> calling
 * {@code saveAsHadoopFile}, and let this class pass values through unchanged.
 */
public static class RDDMultipleTextOutputFormat<A, B> extends MultipleTextOutputFormat<A, B> {

    /**
     * Uses the record's key as the file path relative to the output directory,
     * so a key of {@code "A/A1"} yields the file {@code <output>/A/A1}.
     *
     * @param key   record key; its {@code toString()} becomes the relative path
     * @param value record value (unused here)
     * @param name  default leaf name assigned by Hadoop (unused here)
     * @return the relative output path for this record
     */
    @Override
    protected String generateFileNameForKeyValue(A key, B value, String name) {
        return key.toString();
    }

    /**
     * Passes the value through unchanged; any per-entry restructuring must be
     * done on the RDD before saving (see class comment).
     *
     * @return the value exactly as received
     */
    @Override
    protected B generateActualValue(A key, B value) {
        return value;
    }

    /**
     * Suppresses the key in the file contents: returning {@code null} makes
     * Hadoop write only the value to each output file.
     *
     * @return always {@code null}
     */
    @Override
    protected A generateActualKey(A key, B value) {
        return null;
    }
}