Apache Spark: Spark Cassandra Connector error. Trying to connect to Cassandra from both spark-shell and spark-submit, but both throw the same error.

Tags: apache-spark, cassandra-2.0

Spark version: 1.2.0

Apache Cassandra version: 2.1.1. I am using the DataStax Cassandra driver and connector to connect from Spark 1.2.0 (the versions are listed in the POM file below). The Scala and Java programs work fine except for the Cassandra part. Could someone please help resolve this error?

Error:

java.lang.AbstractMethodError
    at org.apache.spark.Logging$class.log(Logging.scala:52)
    at com.datastax.spark.connector.cql.CassandraConnector$.log(CassandraConnector.scala:144)
    at org.apache.spark.Logging$class.logDebug(Logging.scala:63)
    at com.datastax.spark.connector.cql.CassandraConnector$.logDebug(CassandraConnector.scala:144)
    at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:151)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:151)
    at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:36)
    at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:61)
    at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:73)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:98)
    at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:131)
    at com.datastax.spark.connector.rdd.CassandraRDD.tableDef$lzycompute(CassandraRDD.scala:206)
    at com.datastax.spark.connector.rdd.CassandraRDD.tableDef(CassandraRDD.scala:205)
    at com.datastax.spark.connector.rdd.CassandraRDD.<init>(CassandraRDD.scala:212)
    at com.datastax.spark.connector.SparkContextFunctions.cassandraTable(SparkContextFunctions.scala:48)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:25)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
    at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
    at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
    at $iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
    at $iwC$$iwC$$iwC.<init>(<console>:42)
    at $iwC$$iwC.<init>(<console>:44)
    at $iwC.<init>(<console>:46)
    at <init>(<console>:48)
    at .<init>(<console>:52)
    at .<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:873)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:785)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:628)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:636)
    at org.apache.spark.repl.SparkILoop.loop(SparkILoop.scala:641)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:968)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Scala code (spark-shell):

scala> import com.datastax.spark.connector._
scala> val conf = new SparkConf()
scala> conf.set("cassandra.connection.host", "node1.pc.datastax.com")
scala> val sc = new SparkContext("local[2]", "Cassandra Connector Test", conf)
scala> val table = sc.cassandraTable("keyspace", "table")
scala> table.count
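
One detail worth flagging before the Java version: the shell snippet above sets the property "cassandra.connection.host", while the Java program below (and the connector documentation) uses the prefixed key "spark.cassandra.connection.host". A minimal sketch of the same shell session with the prefixed key, keeping the host, keyspace, and table names from the snippet above as placeholders:

import org.apache.spark.{SparkConf, SparkContext}
import com.datastax.spark.connector._

// Same steps as the snippet above, but with the "spark."-prefixed
// property key that the Java program below also uses.
val conf = new SparkConf().set("spark.cassandra.connection.host", "node1.pc.datastax.com")
val sc = new SparkContext("local[2]", "Cassandra Connector Test", conf)
val table = sc.cassandraTable("keyspace", "table")
table.count

The full Java program used with spark-submit follows.

Java code: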
package com.madhes;

import com.datastax.driver.core.Session;
import com.datastax.spark.connector.cql.CassandraConnector;
import com.google.common.base.Optional;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import scala.Tuple2;
 
import java.io.Serializable;
import java.math.BigDecimal;
import java.text.MessageFormat;
import java.util.*;
 
import static com.datastax.spark.connector.CassandraJavaUtil.*;
 
public class App implements Serializable {
    private transient SparkConf conf;
 
    private App(SparkConf conf) {
        this.conf = conf;
    }
 
    private void run() {
        JavaSparkContext sc = new JavaSparkContext(conf);
        generateData(sc);
        compute(sc);
        showResults(sc);
        sc.stop();
    }
 
    private void generateData(JavaSparkContext sc) {
        CassandraConnector connector = CassandraConnector.apply(sc.getConf());
 
        // Prepare the schema
        try (Session session = connector.openSession()) {
           // session.execute("DROP KEYSPACE IF EXISTS java_api");
         //   session.execute("CREATE KEYSPACE java_api WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
         //   session.execute("CREATE TABLE java_api.products (id INT PRIMARY KEY, name TEXT, parents LIST<INT>)");
         //   session.execute("CREATE TABLE java_api.sales (id UUID PRIMARY KEY, product INT, price DECIMAL)");
         //   session.execute("CREATE TABLE java_api.summaries (product INT PRIMARY KEY, summary DECIMAL)");
        }
 
        // Prepare the products hierarchy
        List<Product> products = Arrays.asList(
                new Product(0, "All products", Collections.<Integer>emptyList()),
                new Product(1, "Product A", Arrays.asList(0)),
                new Product(4, "Product A1", Arrays.asList(0, 1)),
                new Product(5, "Product A2", Arrays.asList(0, 1)),
                new Product(2, "Product B", Arrays.asList(0)),
                new Product(6, "Product B1", Arrays.asList(0, 2)),
                new Product(7, "Product B2", Arrays.asList(0, 2)),
                new Product(3, "Product C", Arrays.asList(0)),
                new Product(8, "Product C1", Arrays.asList(0, 3)),
                new Product(9, "Product C2", Arrays.asList(0, 3))
        );
 
        JavaRDD<Product> productsRDD = sc.parallelize(products);
        javaFunctions(productsRDD, Product.class).saveToCassandra("java_api", "products");
 
        JavaRDD<Sale> salesRDD = productsRDD.filter(new Function<Product, Boolean>() {
            @Override
            public Boolean call(Product product) throws Exception {
                return product.getParents().size() == 2;
            }
        }).flatMap(new FlatMapFunction<Product, Sale>() {
            @Override
            public Iterable<Sale> call(Product product) throws Exception {
                Random random = new Random();
                List<Sale> sales = new ArrayList<>(1000);
                for (int i = 0; i < 1000; i++) {
                    sales.add(new Sale(UUID.randomUUID(), product.getId(), BigDecimal.valueOf(random.nextDouble())));
                }
                return sales;
            }
        });
 
        javaFunctions(salesRDD, Sale.class).saveToCassandra("java_api", "sales");
    }
 
    private void compute(JavaSparkContext sc) {
        JavaPairRDD<Integer, Product> productsRDD = javaFunctions(sc)
                .cassandraTable("java_api", "products", Product.class)
                .keyBy(new Function<Product, Integer>() {
                    @Override
                    public Integer call(Product product) throws Exception {
                        return product.getId();
                    }
                });
 
        JavaPairRDD<Integer, Sale> salesRDD = javaFunctions(sc)
                .cassandraTable("java_api", "sales", Sale.class)
                .keyBy(new Function<Sale, Integer>() {
                    @Override
                    public Integer call(Sale sale) throws Exception {
                        return sale.getProduct();
                    }
                });
 
        JavaPairRDD<Integer, Tuple2<Sale, Product>> joinedRDD = salesRDD.join(productsRDD);
 
        JavaPairRDD<Integer, BigDecimal> allSalesRDD = joinedRDD.flatMapToPair(new PairFlatMapFunction<Tuple2<Integer, Tuple2<Sale, Product>>, Integer, BigDecimal>() {
            @Override
            public Iterable<Tuple2<Integer, BigDecimal>> call(Tuple2<Integer, Tuple2<Sale, Product>> input) throws Exception {
                Tuple2<Sale, Product> saleWithProduct = input._2();
                List<Tuple2<Integer, BigDecimal>> allSales = new ArrayList<>(saleWithProduct._2().getParents().size() + 1);
                allSales.add(new Tuple2<>(saleWithProduct._1().getProduct(), saleWithProduct._1().getPrice()));
                for (Integer parentProduct : saleWithProduct._2().getParents()) {
                    allSales.add(new Tuple2<>(parentProduct, saleWithProduct._1().getPrice()));
                }
                return allSales;
            }
        });
 
        JavaRDD<Summary> summariesRDD = allSalesRDD.reduceByKey(new Function2<BigDecimal, BigDecimal, BigDecimal>() {
            @Override
            public BigDecimal call(BigDecimal v1, BigDecimal v2) throws Exception {
                return v1.add(v2);
            }
        }).map(new Function<Tuple2<Integer, BigDecimal>, Summary>() {
            @Override
            public Summary call(Tuple2<Integer, BigDecimal> input) throws Exception {
                return new Summary(input._1(), input._2());
            }
        });
 
        javaFunctions(summariesRDD, Summary.class).saveToCassandra("java_api", "summaries");
    }
 
    private void showResults(JavaSparkContext sc) {
        JavaPairRDD<Integer, Summary> summariesRdd = javaFunctions(sc)
                .cassandraTable("java_api", "summaries", Summary.class)
                .keyBy(new Function<Summary, Integer>() {
                    @Override
                    public Integer call(Summary summary) throws Exception {
                        return summary.getProduct();
                    }
                });
 
        JavaPairRDD<Integer, Product> productsRdd = javaFunctions(sc)
                .cassandraTable("java_api", "products", Product.class)
                .keyBy(new Function<Product, Integer>() {
                    @Override
                    public Integer call(Product product) throws Exception {
                        return product.getId();
                    }
                });
 
        List<Tuple2<Product, Optional<Summary>>> results = productsRdd.leftOuterJoin(summariesRdd).values().toArray();
 
        for (Tuple2<Product, Optional<Summary>> result : results) {
            System.out.println(result);
        }
    }
 
    public static void main(String[] args) {
        if (args.length != 2) {
            System.err.println("Syntax: com.datastax.spark.demo.JavaDemo <Spark Master URL> <Cassandra contact point>");
            System.exit(1);
        }
 
        SparkConf conf = new SparkConf();
        conf.setAppName("Java API demo");
        conf.setMaster(args[0]);
        conf.set("spark.cassandra.connection.host", args[1]);
 
        App app = new App(conf);
        app.run();
    }
 
    public static class Product implements Serializable {
        private Integer id;
        private String name;
        private List<Integer> parents;
 
        public Product() { }
 
        public Product(Integer id, String name, List<Integer> parents) {
            this.id = id;
            this.name = name;
            this.parents = parents;
        }
 
        public Integer getId() { return id; }
        public void setId(Integer id) { this.id = id; }
 
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
 
        public List<Integer> getParents() { return parents; }
        public void setParents(List<Integer> parents) { this.parents = parents; }
 
        @Override
        public String toString() {
            return MessageFormat.format("Product'{'id={0}, name=''{1}'', parents={2}'}'", id, name, parents);
        }
    }
 
    public static class Sale implements Serializable {
        private UUID id;
        private Integer product;
        private BigDecimal price;
 
        public Sale() { }
 
        public Sale(UUID id, Integer product, BigDecimal price) {
            this.id = id;
            this.product = product;
            this.price = price;
        }
 
        public UUID getId() { return id; }
        public void setId(UUID id) { this.id = id; }
 
        public Integer getProduct() { return product; }
        public void setProduct(Integer product) { this.product = product; }
 
        public BigDecimal getPrice() { return price; }
        public void setPrice(BigDecimal price) { this.price = price; }
 
        @Override
        public String toString() {
            return MessageFormat.format("Sale'{'id={0}, product={1}, price={2}'}'", id, product, price);
        }
    }
 
    public static class Summary implements Serializable {
        private Integer product;
        private BigDecimal summary;
 
        public Summary() { }
 
        public Summary(Integer product, BigDecimal summary) {
            this.product = product;
            this.summary = summary;
        }
 
        public Integer getProduct() { return product; }
        public void setProduct(Integer product) { this.product = product; }
 
        public BigDecimal getSummary() { return summary; }
        public void setSummary(BigDecimal summary) { this.summary = summary; }
 
        @Override
        public String toString() {
            return MessageFormat.format("Summary'{'product={0}, summary={1}'}'", product, summary);
        }
    }
 
}
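
For reference, with the main method above taking the Spark master URL and the Cassandra contact point as its two arguments, a spark-submit invocation would look roughly like the line below. The jar name is only a guess based on the POM's artifactId and version, and both arguments are placeholders; the connector and driver jars also need to be on the classpath, for example via --jars or a shaded (assembly) jar.

spark-submit --class com.madhes.App target/App-1.0-SNAPSHOT.jar spark://master-host:7077 node1.pc.datastax.com

The pom.xml used for the build: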
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
 
    <groupId>com.madhes</groupId>
    <artifactId>App</artifactId>
    <version>1.0-SNAPSHOT</version>
 
    <dependencies>
        <!--Spark Cassandra Connector-->
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>1.0.0</version>
        </dependency>
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector-java_2.10</artifactId>
            <version>1.0.0</version>
        </dependency>
 
        <!--Spark-->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>1.2.0</version>
        </dependency>
        
        
        <dependency>
            <groupId>net.jpountz.lz4</groupId>
            <artifactId>lz4</artifactId>
            <version>1.3.0</version>
        </dependency>

        <dependency>
            <groupId>com.datastax.cassandra</groupId>
            <artifactId>cassandra-driver-core</artifactId>
            <version>2.1.0</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.thrift</groupId>
                    <artifactId>libthrift</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
            <version>0.9.1</version>
        </dependency>
        
    </dependencies>
</project>
<dependency>
            <groupId>org.apache.cassandra</groupId>
            <artifactId>cassandra-all</artifactId>
            <version>1.2.6</version>
            <exclusions>
                <!-- <exclusion> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> 
                    </exclusion> -->
                <exclusion>
                    <groupId>com.googlecode.concurrentlinkedhashmap</groupId>
                    <artifactId>concurrentlinkedhashmap-lru</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.ning</groupId>
                    <artifactId>compress-lzf</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>io.netty</groupId>
                    <artifactId>netty</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>jline</groupId>
                    <artifactId>jline</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.cassandra.deps</groupId>
                    <artifactId>avro</artifactId>
                </exclusion>
            </exclusions>
</dependency>
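
A hedged observation on the versions above: an AbstractMethodError raised from org.apache.spark.Logging usually indicates a binary mismatch between the Spark version on the classpath and the Spark version the connector was built against. The POM pairs spark-core_2.10 1.2.0 with spark-cassandra-connector_2.10 1.0.0, and the 1.0.x connector line targets Spark 1.0.x. If that mismatch is indeed the cause here, the connector artifacts would need to move to a release line matching Spark 1.2, along the lines of:

<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.10</artifactId>
    <version>1.2.0</version>
</dependency>
<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector-java_2.10</artifactId>
    <version>1.2.0</version>
</dependency>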