Java Spark UDF强制转换异常

Java Spark UDF强制转换异常,java,apache-spark,Java,Apache Spark,使用spark UDF时面临类型转换问题 import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gson.Gson; import com.typesafe.config.Config; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.sql.*; import org.

使用spark UDF时面临类型转换问题

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.typesafe.config.Config;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.sql.*;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;
import scala.Tuple2;
import scala.collection.JavaConverters;
import scala.collection.mutable.WrappedArray;


import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import static java.util.Arrays.copyOfRange;
import static org.apache.spark.sql.functions.*;


  // UDF over an array-of-maps column. Spark hands map values to a Java UDF as
  // scala.collection.Map (concretely e.g. scala.collection.immutable.Map$Map1),
  // NOT java.util.Map — declaring the element type as java.util.Map is exactly
  // what raised the ClassCastException in the question. Declare the Scala type
  // and convert each inner map explicitly.
  UDF1 mode1 = new UDF1<WrappedArray<scala.collection.Map<Double, Integer>>, String>() {
    @Override
    public String call(WrappedArray<scala.collection.Map<Double, Integer>> maps) throws Exception {
      // Outer conversion: Scala Seq -> java.util.List (elements remain Scala maps).
      List<scala.collection.Map<Double, Integer>> scalaMaps =
          JavaConverters.seqAsJavaListConverter(maps).asJava();
      // Inner conversion: each Scala map -> java.util.Map, then merge all entries
      // into one map. The (a, b) -> b merge function keeps the last value when a
      // key appears in more than one map; without it Collectors.toMap throws
      // IllegalStateException on duplicate keys.
      java.util.Map<Double, Integer> merged = scalaMaps.stream()
          .map(m -> JavaConverters.mapAsJavaMapConverter(m).asJava())
          .flatMap(m -> m.entrySet().stream())
          .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> b));
      System.out.println(merged.get(key)); // NOTE(review): `key` is undefined in this snippet — confirm where it comes from
      return "";
    }
  };
import com.google.common.collect.Lists;
导入com.google.common.collect.Maps;
导入com.google.gson.gson;
导入com.typesafe.config.config;
导入org.apache.spark.api.java.javapairdd;
导入org.apache.spark.sql.*;
导入org.apache.spark.sql.api.java.UDF1;
导入org.apache.spark.sql.types.DataTypes;
导入scala.Tuple2;
导入scala.collection.JavaConverters;
导入scala.collection.mutable.WrappedArray;
导入java.io.IOException;
导入java.io.Serializable;
导入java.util.List;
导入java.util.Map;
导入java.util.Set;
导入java.util.concurrent.ExecutionException;
导入java.util.stream.Collectors;
导入静态java.util.Arrays.copyOfRange;
导入静态org.apache.spark.sql.functions.*;
UDF1模式1=新UDF1(){
@凌驾
公共字符串调用(WrappedArray映射)引发异常{
List lis=(List)JavaConverters.seqAsJavaListConverter(maps).asJava();
java.util.Map a=lis.stream().flatMap(Map->Map.entrySet().stream())
.collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue));
System.out.println(a.get(key));
返回“”;
}
};
错误:

原因:java.lang.ClassCastException: scala.collection.immutable.Map$Map1不能强制转换为java.util.Map 在 java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:269)


你能分享你的代码导入吗?@tarun用导入更新了这个问题