Scala Spark UDF 不接受数组（Array）
为什么 Spark UDF 函数不支持 Scala 的 Array，而使用 WrappedArray 的 UDF 却能正常工作？在函数定义中把参数声明为 Array[Date] 时会抛出 ClassCastException；一旦换成 WrappedArray，就能正常运行。（标签：Scala, Apache Spark）代码如下：
/**
 * Returns the date in `listOfDate` closest to the target date
 * `packageSD + durationrange` days, considering only candidates whose
 * absolute day-distance from the target is at most `durationrange`.
 *
 * @param listOfDate    candidate billing dates (WrappedArray — Spark
 *                      deserializes SQL ArrayType columns as a Seq)
 * @param packageSD     package start date
 * @param durationrange offset in days used both to compute the target date
 *                      and as the maximum allowed distance
 * @return the closest qualifying date, or null when no candidate falls
 *         within `durationrange` days of the target (original contract —
 *         kept for caller compatibility)
 */
def getDate(listOfDate:mutable.WrappedArray[Date], packageSD:Date, durationrange:Int):Date = {
  // Target date: package start shifted by the duration range.
  val nextdate = packageSD.toLocalDate.plusDays(durationrange)

  // Single pass keeping the smallest absolute day-distance seen so far.
  // `<=` means a later element wins ties, matching the original scan order.
  val (_, billdate) = listOfDate.foldLeft((durationrange, null: Date)) {
    case ((mindays, best), rec) =>
      val daysDiff = Math.abs(ChronoUnit.DAYS.between(rec.toLocalDate, nextdate)).toInt
      if (daysDiff <= mindays) (daysDiff, rec) else (mindays, best)
  }
  billdate
}
// Register getDate as a Spark SQL UDF.
// NOTE(review): the array parameter of getDate must be typed as
// mutable.WrappedArray, not Array — Spark deserializes ArrayType columns
// as a Seq (a WrappedArray at runtime), so declaring the parameter as
// Array[Date] causes a ClassCastException when the UDF is invoked.
import org.apache.spark.sql.functions.udf
val udffn = udf(getDate _)
（以下为上文代码的重复，机器翻译造成的标识符损坏已修正：）

def getDate(listOfDate: mutable.WrappedArray[Date], packageSD: Date, durationrange: Int): Date = {
  var nextdate = packageSD.toLocalDate.plusDays(durationrange)
  var billdate: Date = null
  var mindays = durationrange
  var billingdate = listOfDate.map(rec => {
    var recdate = rec
    var daysDiff = Math.abs(ChronoUnit.DAYS.between(recdate.toLocalDate, nextdate)).toInt
    if (daysDiff <= mindays) {
      mindays = daysDiff
      billdate = recdate
    }
  })
  return billdate
}

回答：Spark UDF 的集合参数需要一个 Seq：WrappedArray 是 Seq 的子类，而 Array 不是 Seq，因此把参数声明为 Array[Date] 会在运行时抛出 ClassCastException；改为 mutable.WrappedArray[Date] 即可正常工作。