当前位置:   article > 正文

Kafka 配置 Kerberos 安全认证(适用于 JDK8、JDK11)— Java 客户端示例
三、根据自己的 Kafka 版本引入 Maven 依赖:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>3.1.0</version>
</dependency>
四、生产者样例代码

package com.example.demo.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**

  • @Author: meng
  • @Version: 1.0
    */
    public class ProductKafkaKerberos {

public static void main(String[] args) {
String filePath = System.getProperty(“user.dir”) + “\conf\”;
System.setProperty(“java.security.auth.login.config”, filePath + “kafka_client_jaas.conf”);
System.setProperty(“java.security.krb5.conf”, filePath + “krb5.conf”);

Properties props = new Properties();
props.put(“bootstrap.servers”, “hdp-1:9092”);
props.put(“acks”, “all”);
props.put(“key.serializer”, “org.apache.kafka.common.serialization.StringSerializer”);
props.put(“value.serializer”, “org.apache.kafka.common.serialization.StringSerializer”);
// sasl
props.put(“jaas.enabled”, true);
props.put(“sasl.mechanism”, “GSSAPI”);
props.put(“security.protocol”, “SASL_PLAINTEXT”);
props.put(“sasl.kerberos.service.name”, “kafka”);

Producer<String, String> producer = new KafkaProducer<>(props);
for (int i = 0; i < 3; i++) {
producer.send(new ProducerRecord<String, String>(“test”, Integer.toString(i), Integer.toString(i)));
}
System.out.println(“producer is success”);
producer.close();
}

}

五、消费者样例代码

package com.example.demo.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**

  • @Author: meng
  • @Version: 1.0
    */
    public class ConsumertKafkaKerberos {

public static void main(String[] args) {
String filePath = System.getProperty(“user.dir”) + “\conf\”;
System.setProperty(“java.security.auth.login.config”, filePath + “kafka_client_jaas.conf”);
System.setProperty(“java.security.krb5.conf”, filePath + “krb5.conf”);

Properties props = new Properties();
props.put(“bootstrap.servers”, “hdp-1:9092”);
props.put(“group.id”, “test_group”);
props.put(“enable.auto.commit”, “true”);
props.put(“auto.commit.interval.ms”, “1000”);
props.put(“key.deserializer”, “org.apache.kafka.common.serialization.StringDeserializer”);
props.put(“value.deserializer”, “org.apache.kafka.common.serialization.StringDeserializer”);
// sasl
props.put(“sasl.mechanism”, “GSSAPI”);
props.put(“security.protocol”, “SASL_PLAINTEXT”);
props.put(“sasl.kerberos.service.name”, “kafka”);

@SuppressWarnings(“resource”)
KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
String topic = “test”;
consumer.subscribe(Arrays.asList(topic));
while (true) {
try {
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
for (ConsumerRecord<String, String> record : records) {
System.out.printf(“offset = %d, partition = %d, key = %s, value = %s%n”,
record.offset(), record.partition(), record.key(), record.value());
}
} catch (Exception e) {

自我介绍一下,小编13年上海交大毕业,曾经在小公司待过,也去过华为、OPPO等大厂,18年进入阿里一直到现在。

深知大多数大数据工程师,想要提升技能,往往是自己摸索成长或者是报班学习,但对于培训机构动则几千的学费,着实压力不小。自己不成体系的自学效果低效又漫长,而且极易碰到天花板技术停滞不前!

因此收集整理了一份《2024年大数据全套学习资料》,初衷也很简单,就是希望能够帮助到想自学提升又不知道该从何学起的朋友。
img
img
img
img
img

既有适合小白学习的零基础资料,也有适合3年以上经验的小伙伴深入学习提升的进阶课程,基本涵盖了95%以上大数据开发知识点,真正体系化!

由于文件比较大,这里只是将部分目录大纲截图出来,每个节点里面都包含大厂面经、学习笔记、源码讲义、实战项目、讲解视频,并且后续会持续更新

如果你觉得这些内容对你有帮助,可以添加VX:vip204888 (备注大数据获取)
img

包含大厂面经、学习笔记、源码讲义、实战项目、讲解视频,并且后续会持续更新**

如果你觉得这些内容对你有帮助,可以添加VX:vip204888 (备注大数据获取)
[外链图片转存中…(img-VYbQFJbJ-1712526244580)]

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/空白诗007/article/detail/950984
推荐阅读
相关标签
  

闽ICP备14008679号