[Kafka Security]Kafka配置SASL
Document: kafka.apache.org/28/documentation.html#security_sasl
1. Api
2. Kafka配置SASL
2.1 Kafka SCRAM和PLAIN
PLAIN认证有个问题,就是不能动态新增用户,每次添加用户后,需要重启正在运行的Kafka集群才能生效。因此,在生产环境,这种认证方式不符合实际业务场景。而SCRAM不一样,使用SCRAM认证,可以动态新增用户,添加用户后,可以不用重启正在运行的Kafka集群即可进行鉴权。
2.2 实战
- 1.进入kafka的安装目录
cd /opt/kafka/config
- 2.创建kafka_server_jaas.conf
KafkaServer使用user_<name>来定义多个用户,供客户端程序(生产者、消费者程序)认证使用
Client配置,主要用于broker连接到zookeeper。从上文的Zookeeper JAAS文件中选择一个用户,填写用户名和密码即可。
KafkaServer {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin"
user_admin="admin"
user_nolan="nolan";
};
Client{
org.apache.kafka.common.security.scram.ScramLoginModule required
username="nolan"
password="nolan";
};
- 3.配置server.properties
PLAINTEXT:明文
############################# Server Basics #############################
# The id of the broker. This must be set to a unique integer for each broker.
broker.id=0
listeners=SASL_PLAINTEXT://10.xxx.xxx.xxx:9092
security.inter.broker.protocol=SASL_PLAINTEXT
#security.inter.broker.protocol=SASL_PLAINTEXT (or SASL_SSL) 前后一致
sasl.mechanism.inter.broker.protocol=SCRAM-SHA-256
# 也可以使用 SCRAM-SHA-512,注意集群内所有 broker 必须保持一致
sasl.enabled.mechanisms=SCRAM-SHA-256
allow.everyone.if.no.acl.found=true
authorizer.class.name=kafka.security.auth.SimpleAclAuthorizer
# 注意:SimpleAclAuthorizer 自 Kafka 2.4 起已废弃,新版本推荐使用 kafka.security.authorizer.AclAuthorizer
- 4.kafka启动脚本加入配置
kafka-server-start.sh
vim bin/sasl-kafka-server-start.sh
# 源代码
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
export KAFKA_HEAP_OPTS="-Xmx1G -Xms1G"
fi
# 修改为
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
export KAFKA_HEAP_OPTS="-Xmx1G -Xms1G -Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf"
fi
- 5.配置生产者和消费者
Configure the following properties in producer.properties or consumer.properties
security.protocol=SASL_PLAINTEXT
sasl.mechanism=SCRAM-SHA-256
# 或 SCRAM-SHA-512,需与 broker 端 sasl.enabled.mechanisms 保持一致
- 6.启动kafka
kafka-server-start.sh -daemon /opt/kafka/config/server.properties
2.3 Java Code
- 1.Dependency
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.1.0</version>
</dependency>
- 2.Producer
public void createProducerToSendMsg() {
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
#props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put("acks", "all");
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("security.protocol", "SASL_PLAINTEXT");
props.put("sasl.mechanism", "SCRAM-SHA-512");
String jaas = "org.apache.kafka.common.security.scram.ScramLoginModule required username="{username}" password="{password}";";
props.put("sasl.jaas.config", jaas.replace("{username}", "nolan").replace("{password}", “nolan”));
Producer<String, String> producer = new KafkaProducer<>(props);
for (int i = 0; i < 100; i++){
producer.send(new ProducerRecord<String, String>("my-topic", Integer.toString(i), Integer.toString(i)));
}
producer.close();
}
- 3.Consumer
public void createConsumerToSubscribeTopic() {
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
#props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("group.id", "Nolan123456");
props.put("fetch.max.bytes", 1024);
props.put("enable.auto.commit", true);
props.put("auto.commit.interval.ms", 1000);
props.put("security.protocol", "SASL_PLAINTEXT");
props.put("sasl.mechanism", "SCRAM-SHA-512");
props.put("auto.offset.reset","earliest");
String jaas = "org.apache.kafka.common.security.scram.ScramLoginModule required username="{username}" password="{password}";";
props.put("sasl.jaas.config", jaas.replace("{username}", "nolan").replace("{password}", “nolan”));
Consumer<String, String> consumer = new KafkaConsumer<>(props);
consumer.subscribe(Arrays.asList("foo", "bar"));
while (true) {
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
for (ConsumerRecord<String, String> record : records)
System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
}
}
- 4.Admin
public AdminClient creatAdminClient() {
String brokerUrl = kafkaConfig.getBrokerUrl();
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
#props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 30000);
props.put("security.protocol","SASL_PLAINTEXT");
props.put("sasl.mechanism", "SCRAM-SHA-512");
String jaas="org.apache.kafka.common.security.scram.ScramLoginModule required username="{username}" password="{password}";";
props.put("sasl.jaas.config", jaas.replace("{username}", "nolan").replace("{password}", “nolan”));
AdminClient client = AdminClient.create(props);
return client;
}
3. Create User/Permission
Version 不得低于 2.7.x,否则 kafka-clients 中没有此 API。
3.1 创建用户
/**
 * Registers a SCRAM-SHA-512 credential (8192 iterations) for the given
 * account, creating the user if it does not yet exist.
 *
 * @param account  user name to upsert
 * @param password plaintext password; the broker stores only the salted hash
 */
public static void addUser(String account, String password) throws ExecutionException, InterruptedException, IOException {
    AdminClient admin = creatAdminClient();
    // Hashing parameters for the new credential.
    ScramCredentialInfo credential =
            new ScramCredentialInfo(org.apache.kafka.clients.admin.ScramMechanism.SCRAM_SHA_512, 8192);
    List<UserScramCredentialAlteration> upserts = new ArrayList<>();
    upserts.add(new UserScramCredentialUpsertion(account, credential, password));
    // Fire the alteration and block until the broker acknowledges it.
    admin.alterUserScramCredentials(upserts).all().get();
}
3.2 删除用户
/**
 * Removes the SCRAM-SHA-512 credential of the given account.
 *
 * @param account user name whose credential is deleted
 */
public static void deleteUser(String account) throws ExecutionException, InterruptedException, IOException {
    AdminClient admin = creatAdminClient();
    List<UserScramCredentialAlteration> deletions = new ArrayList<>();
    deletions.add(new UserScramCredentialDeletion(
            account, org.apache.kafka.clients.admin.ScramMechanism.SCRAM_SHA_512));
    // Submit the deletion and block until the broker confirms.
    admin.alterUserScramCredentials(deletions).all().get();
}
3.3 获取所有用户信息
/**
 * Prints every SCRAM user known to the cluster together with its
 * credential description.
 */
public static void describeAccount() throws ExecutionException, InterruptedException {
    AdminClient admin = creatAdminClient();
    // Calling describeUserScramCredentials() without arguments lists all users.
    java.util.Map<String, UserScramCredentialsDescription> users =
            admin.describeUserScramCredentials().all().get();
    for (java.util.Map.Entry<String, UserScramCredentialsDescription> entry : users.entrySet()) {
        System.out.println("[ScramUserName:" + entry.getKey() + "]:[ScramUserInfo:" + entry.getValue().toString() + "]");
    }
}
3.4 授权用户消费组读权限
/**
 * Grants the account READ permission on the given consumer group
 * (any host, literal group-name match).
 *
 * @param account       user name (without the "User:" prefix)
 * @param consumerGroup consumer group id to authorize
 */
public static void addGroupReadAcl(String account, String consumerGroup) throws IOException, ExecutionException, InterruptedException {
    AdminClient admin = creatAdminClient();
    // One binding = (resource, principal+operation+permission).
    AclBinding binding = new AclBinding(
            new ResourcePattern(ResourceType.GROUP, consumerGroup, PatternType.LITERAL),
            new AccessControlEntry("User:" + account, "*", AclOperation.READ, AclPermissionType.ALLOW));
    Collection<AclBinding> bindings = new ArrayList<>();
    bindings.add(binding);
    // Create the ACL and block until the broker applies it.
    admin.createAcls(bindings).all().get();
}
3.5 移除用户消费组读权限
/**
 * Revokes the account's READ permission on the given consumer group.
 *
 * @param account       user name (without the "User:" prefix)
 * @param consumerGroup consumer group id whose ACL is removed
 */
public static void deleteGroupReadAcl(String account, String consumerGroup) throws IOException, ExecutionException, InterruptedException {
    AdminClient admin = creatAdminClient();
    // The filter mirrors the binding created by addGroupReadAcl.
    AclBindingFilter filter = new AclBindingFilter(
            new ResourcePatternFilter(ResourceType.GROUP, consumerGroup, PatternType.LITERAL),
            new AccessControlEntryFilter("User:" + account, "*", AclOperation.READ, AclPermissionType.ALLOW));
    Collection<AclBindingFilter> filters = new ArrayList<>();
    filters.add(filter);
    // Delete every ACL matching the filter and wait for completion.
    admin.deleteAcls(filters).all().get();
}
3.6 授权用户Topic读写权限
/**
 * Grants the account both READ and WRITE permission on the given topic.
 * The original "AclOperation.WRITE/READ" was pseudo-code and did not
 * compile; an AccessControlEntry takes exactly one operation, so the
 * read/write grant becomes two explicit bindings.
 *
 * @param topic   topic name (literal match)
 * @param account user name (without the "User:" prefix)
 */
public static void addTopicReadOrWriterAcl(String topic, String account) throws IOException, ExecutionException, InterruptedException {
    AdminClient adminClient = creatAdminClient();
    ResourcePattern resourcePattern = new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL);
    Collection<AclBinding> aclBindingCollection = new ArrayList<>();
    // One binding per operation.
    aclBindingCollection.add(new AclBinding(resourcePattern,
            new AccessControlEntry("User:" + account, "*", AclOperation.READ, AclPermissionType.ALLOW)));
    aclBindingCollection.add(new AclBinding(resourcePattern,
            new AccessControlEntry("User:" + account, "*", AclOperation.WRITE, AclPermissionType.ALLOW)));
    CreateAclsResult aclResult = adminClient.createAcls(aclBindingCollection);
    aclResult.all().get(); // block until the broker confirms
}
3.7 移除用户Topic读写权限
/**
 * Revokes the account's READ and WRITE permissions on the given topic.
 * The original "AclOperation.WRITE/READ" was pseudo-code and did not
 * compile; the revocation is now two explicit filters, mirroring the
 * two bindings created by addTopicReadOrWriterAcl.
 *
 * @param topic   topic name (literal match)
 * @param account user name (without the "User:" prefix)
 */
public static void deleteTopicReadOrWriterAcl(String topic, String account) throws IOException, ExecutionException, InterruptedException {
    AdminClient adminClient = creatAdminClient();
    ResourcePatternFilter resourcePatternFilter = new ResourcePatternFilter(ResourceType.TOPIC, topic, PatternType.LITERAL);
    Collection<AclBindingFilter> aclBindingCollection = new ArrayList<>();
    // One filter per operation.
    aclBindingCollection.add(new AclBindingFilter(resourcePatternFilter,
            new AccessControlEntryFilter("User:" + account, "*", AclOperation.READ, AclPermissionType.ALLOW)));
    aclBindingCollection.add(new AclBindingFilter(resourcePatternFilter,
            new AccessControlEntryFilter("User:" + account, "*", AclOperation.WRITE, AclPermissionType.ALLOW)));
    adminClient.deleteAcls(aclBindingCollection).all().get(); // block until done
}
3.8 批量移除用户Topic读写权限
/**
 * Batch-revokes READ and WRITE topic permissions for every entry in
 * the list. Fixes three defects in the original: it closed the
 * AdminClient immediately after creating it, it never actually called
 * deleteAcls (the filter collection was built and discarded), and
 * "AclOperation.WRITE/READ" was pseudo-code that did not compile.
 *
 * @param kafkaAuthoritys (account, topic) pairs whose ACLs are removed
 */
public static void batchDeleteTopicReadOrWriterAcl(List<KafkaAuthority> kafkaAuthoritys) throws IOException, ExecutionException, InterruptedException {
    AdminClient adminClient = creatAdminClient();
    Collection<AclBindingFilter> aclBindingCollection = new ArrayList<>();
    for (KafkaAuthority kafkaAuthority : kafkaAuthoritys) {
        ResourcePatternFilter resourcePatternFilter =
                new ResourcePatternFilter(ResourceType.TOPIC, kafkaAuthority.getTopic(), PatternType.LITERAL);
        // One filter per operation, mirroring addTopicReadOrWriterAcl.
        aclBindingCollection.add(new AclBindingFilter(resourcePatternFilter,
                new AccessControlEntryFilter("User:" + kafkaAuthority.getAccount(), "*", AclOperation.READ, AclPermissionType.ALLOW)));
        aclBindingCollection.add(new AclBindingFilter(resourcePatternFilter,
                new AccessControlEntryFilter("User:" + kafkaAuthority.getAccount(), "*", AclOperation.WRITE, AclPermissionType.ALLOW)));
    }
    // Execute the batch deletion and wait for the broker to confirm.
    adminClient.deleteAcls(aclBindingCollection).all().get();
}
3.9 查看所有用户的所有权限
/**
 * Prints every ACL on the cluster: resource (name, pattern type,
 * resource type) and entry (principal, permission, operation).
 */
public static void describeAllAcl() throws ExecutionException, InterruptedException {
    AdminClient adminClient = creatAdminClient();
    // AclBindingFilter.ANY matches all resources and all principals.
    DescribeAclsResult result = adminClient.describeAcls(AclBindingFilter.ANY);
    try {
        Collection<AclBinding> gets = result.values().get();
        for (AclBinding get : gets) {
            System.out.println(get.pattern().name());
            System.out.println(get.pattern().patternType());
            System.out.println(get.pattern().resourceType());
            System.out.println(get.entry().principal());
            System.out.println(get.entry().permissionType());
            System.out.println(get.entry().operation());
            System.out.println("-------------------------");
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    } catch (ExecutionException e) {
        e.printStackTrace();
    }
}