
kafka-Consumer-SSL
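A minimal Kafka consumer that authenticates to a SASL_SSL-secured cluster with the PLAIN mechanism, polls a topic, parses each record value as JSON with fastjson, and commits offsets manually.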

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;

import java.io.IOException;
import java.io.InputStream;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

public class KafkaConsumerSsl {

    private static volatile boolean isRunning = true;

    public static void main(String[] args) throws IOException {
        Properties environment = new Properties();
        InputStream is = KafkaConsumerSsl.class.getClassLoader().getResourceAsStream("config/application.properties");
        environment.load(is);

        // Configure the JAAS login file for SASL authentication (set once per JVM)
        if (null == System.getProperty("java.security.auth.login.config")) {
            System.setProperty("java.security.auth.login.config", environment.getProperty("java.security.auth.login.config"));
        }

        Properties kafkaProps = new Properties();
        kafkaProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        kafkaProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, environment.getProperty("ssl.truststore.location"));
        // an empty value disables server hostname verification
        kafkaProps.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
        kafkaProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "KafkaOnsClient");
        kafkaProps.put(SaslConfigs.SASL_MECHANISM, "PLAIN");

        kafkaProps.put("bootstrap.servers", "h1:9093,h2:9093,h3:9093");
        kafkaProps.put("enable.auto.commit", false);  //注意这里设置为手动提交方式
        kafkaProps.put("group.id", "data-stream-group");
        kafkaProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        String topic = "h3yun-bi-web-active-topic";

        Consumer<String, String> consumer = new KafkaConsumer<>(kafkaProps);
        consumer.subscribe(Arrays.asList(topic));

        try {
            while (isRunning) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(1));
                if (!consumerRecords.isEmpty()) {
                    for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                        System.out.println(
                                "TopicName: " + consumerRecord.topic() +
                                " Partition:" + consumerRecord.partition() +
                                " Offset:" + consumerRecord.offset());

                        String json = consumerRecord.value();
                        JSONObject p = JSON.parseObject(json);
                        String time = p.getString("event_timestamp");
                        System.out.println("-----receive---->" + time);
                        // business logic goes here
                    }
                    consumer.commitAsync(); // asynchronous commit: fast, but failed commits are not retried
                }
            }
        } catch (Exception e) {
            // handle the exception (log it at minimum)
            e.printStackTrace();
        } finally {
            try {
                // synchronous commit on shutdown so the last processed offsets are not lost
                consumer.commitSync();
            } finally {
                consumer.close();
            }
        }

    }
}
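The code reads two settings from config/application.properties on the classpath. A minimal sketch of that file, assuming the truststore and the JAAS login file are deployed alongside the application (all paths below are illustrative):

# config/application.properties (illustrative paths)
java.security.auth.login.config=/home/admin/kafka_client_jaas.conf
ssl.truststore.location=/home/admin/kafka.client.truststore.jks

The JAAS file named by java.security.auth.login.config carries the PLAIN credentials the broker verifies. A minimal sketch with placeholder credentials:

KafkaClient {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="yourUsername"
    password="yourPassword";
};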
<!-- Kafka-related dependency -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-10_2.12</artifactId>
            <version>3.1.1</version>
            <scope>compile</scope>
        </dependency>
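The Spark Streaming artifact above pulls in kafka-clients transitively. If the consumer runs standalone, depending on the client and JSON libraries directly is enough; the version numbers below are illustrative:

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.4.1</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.83</version>
        </dependency>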