Java toDF() returns LinkageError: attempted duplicate class definition

Tags: java, hadoop, apache-spark, mapr

I want to convert an RDD to a DataFrame, and I'm fairly sure I'm doing it by the book, but this error keeps my code from running. I don't know how to resolve the duplicate class definition problem.

Here is my code in the Spark shell:

val auctionDF = auctionsRDD.toDF()
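
For context, a minimal sketch of the kind of setup this call assumes — the Auction case class, its fields, and the input path are hypothetical, not taken from the original question:

    // In spark-shell, `spark` (SparkSession) and `sc` (SparkContext) are predefined.
    import spark.implicits._  // provides rddToDatasetHolder, which is what makes .toDF() available

    // Hypothetical record type; .toDF() derives column names from the case class fields.
    case class Auction(auctionId: String, bid: Double, bidTime: Double)

    val auctionsRDD = sc.textFile("/path/to/auctiondata.csv")
      .map(_.split(","))
      .map(a => Auction(a(0), a(1).toDouble, a(2).toDouble))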
Here is the error:

java.lang.reflect.InvocationTargetException
        at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)
        at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)
        at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)
        at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)
        at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)
        at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)
        at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)
        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)
        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
        at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
        at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
        at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
        at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
        at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
        at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
        at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
        at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)
        at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)
        at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)
        at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)
        at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:35)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)
        at $line30.$read$$iw$$iw$$iw$$iw.<init>(<console>:50)
        at $line30.$read$$iw$$iw$$iw.<init>(<console>:52)
        at $line30.$read$$iw$$iw.<init>(<console>:54)
        at $line30.$read$$iw.<init>(<console>:56)
        at $line30.$read.<init>(<console>:58)
        at $line30.$read$.<init>(<console>:62)
        at $line30.$read$.<clinit>(<console>)
        at $line30.$eval$.$print$lzycompute(<console>:7)
        at $line30.$eval$.$print(<console>:6)
        at $line30.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
        at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
        at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
        at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
        at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
        at org.apache.spark.repl.Main$.doMain(Main.scala:68)
        at org.apache.spark.repl.Main$.main(Main.scala:51)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        ... 98 more
java.lang.RuntimeException: Failure loading MapRClient.
        at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:305)
        at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)
        at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)
        at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)
        at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)
        at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)
        at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)
        at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)
        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)
        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
        at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
        at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
        at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
        at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
        at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
        at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
        at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
        at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
        at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)
        at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)
        at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)
        at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)
        at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:35)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:46)
        at $line30.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:48)
        at $line30.$read$$iw$$iw$$iw$$iw.<init>(<console>:50)
        at $line30.$read$$iw$$iw$$iw.<init>(<console>:52)
        at $line30.$read$$iw$$iw.<init>(<console>:54)
        at $line30.$read$$iw.<init>(<console>:56)
        at $line30.$read.<init>(<console>:58)
        at $line30.$read$.<init>(<console>:62)
        at $line30.$read$.<clinit>(<console>)
        at $line30.$eval$.$print$lzycompute(<console>:7)
        at $line30.$eval$.$print(<console>:6)
        at $line30.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
        at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
        at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
        at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
        at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
        at org.apache.spark.repl.Main$.doMain(Main.scala:68)
        at org.apache.spark.repl.Main$.main(Main.scala:51)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.reflect.InvocationTargetException
        at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)
        ... 94 more
Caused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        ... 98 more
java.lang.RuntimeException: Failure loading MapRClient.
  at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:305)
  at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)
  at com.mapr.fs.MapRFileSystem.<clinit>(MapRFileSystem.java:107)
  at java.lang.Class.forName0(Native Method)
  at java.lang.Class.forName(Class.java:348)
  at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)
  at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)
  at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)
  at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)
  at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)
  at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
  at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)
  at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)
  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)
  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)
  at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)
  at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)
  at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)
  at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)
  at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
  at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
  at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
  at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
  at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
  at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
  at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
  at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
  at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
  at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
  at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
  at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
  at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
  at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
  at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
  at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
  at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:161)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:167)
  at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)
  at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)
  at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)
  at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)
  ... 50 elided
Caused by: java.lang.reflect.InvocationTargetException: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"
  at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)
  ... 94 more
Caused by: java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"
  at java.lang.ClassLoader.defineClass1(Native Method)
  at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
  ... 98 more
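
An editorial note on what the trace suggests: toDF() forces Spark to initialize its Hive support, and the Hive metastore client is constructed inside an isolated classloader (IsolatedClientLoader, visible in the trace). That isolated loader then tries to define the MapR shim class com/mapr/fs/shim/LibraryLoader a second time, after the base classloader has already defined it and injected the native library — hence the LinkageError. On MapR clusters the usual remedy is to tell Spark to share the MapR classes between the isolated Hive loader and the base loader via spark.sql.hive.metastore.sharedPrefixes. A hedged sketch for spark-defaults.conf follows; the exact class list varies by MapR and Spark version, so treat it as an assumption to check against your distribution's shipped defaults:

    # spark-defaults.conf: share MapR (and JDBC driver) classes with the
    # isolated Hive metastore classloader instead of redefining them there.
    spark.sql.hive.metastore.sharedPrefixes com.mysql.jdbc,org.postgresql,com.microsoft.sqlserver,oracle.jdbc,com.mapr.fs.shim.LibraryLoader,com.mapr.security.JNISecurity,com.mapr.fs.jni

The property is read when the Hive client is first created, so it must be in place before the session starts; it can also be passed at launch with spark-shell --conf spark.sql.hive.metastore.sharedPrefixes=... rather than edited into the config file.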