Apache Kafka：Storm 中 Kafka Spout 的 SSL 配置

标签：apache-kafka, kafka-consumer-api, apache-storm, apache-kafka-security（Apache Kafka、Kafka Consumer API、Apache Storm、Apache Kafka Security）。问题摘要：我们正在创建一个带有 Kafka Spout 的 Storm 拓扑；Kafka 上的数据由生产者使用 SSL 加密；我们通过 storm-kafka 集成直接在拓扑中消费 Kafka 数据（完整代码见下文）。

我们正在创建一个带有 Kafka Spout 的 Storm 拓扑。Kafka 上的数据由生产者使用 SSL 加密。我们使用 storm-kafka 的直接集成方式（Spout 接 Bolt）来消费 Kafka 中的数据，代码如下所示：

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.kafka.BrokerHosts;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.StringScheme;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.Utils;

   public class Topology {

public static void main(String[] args) throws Exception {

    TopologyBuilder builder = new TopologyBuilder();
    Config conf = new Config();
    // getting properties from topology.properties
    Properties prop = new Properties();
    InputStream input = null;
    input = new FileInputStream(args[0]);
    prop.load(input);
    spoutId = prop.getProperty("spoutId");
    topologyName = prop.getProperty("topologyName");
    zkhost = prop.getProperty("zkhost");

    // setting workers for each bolt from properties
    numSpout = Integer.parseInt(prop.getProperty("numSpout"));
    numConverter = Integer.parseInt(prop.getProperty("numConverter"));
    numTaskSpout = Integer.parseInt(prop.getProperty("numTaskSpout"));
    numTaskConverter = Integer.parseInt(prop.getProperty("numTaskConverter"));

    SpoutConfig spoutConfig = new SpoutConfig(hosts, inputTopic, "/" + KafkaBroker, consumerGroup);
    spoutConfig.scheme = new SchemeAsMultiScheme(new StringScheme());
    spoutConfig.startOffsetTime = kafka.api.OffsetRequest.LatestTime();
    spoutConfig.maxOffsetBehind = Long.MAX_VALUE;
    spoutConfig.useStartOffsetTimeIfOffsetOutOfRange = true;
    spoutConfig.ignoreZkOffsets = true;
    spoutConfig.zkPort = 2181;
    spoutConfig.socketTimeoutMs = socketTimeoutMs;
    spoutConfig.fetchSizeBytes = fetchSizeBytes;

    KafkaSpout kafkaSpout = new KafkaSpout(spoutConfig);
    builder.setSpout(spoutId, kafkaSpout, numSpout).setNumTasks(numTaskSpout)
            .addConfiguration("offset.commit.period.ms", offsetCommitPeriodMs)
            .addConfiguration("max.uncommitted.offsets", maxUncommittedOffsets)
            .addConfiguration("poll.timeout.ms", pollTimeoutMs);

    builder.setBolt("Converter", new Converter(), numConverter).shuffleGrouping(spoutId)
            .setNumTasks(numTaskConverter);
我们应如何配置 Kafka Spout，才能访问这些经 SSL 加密的数据？