Ubuntu上Kafka安全设置实操指南
一 安全目标与总体架构
二 生成证书与密钥库
# Location of the Kafka installation; key material goes under config/certs.
KAFKA_HOME=/opt/kafka
mkdir -p "$KAFKA_HOME/config/certs"

# Generate the broker key pair. '-genkeypair' is the current name for the
# deprecated '-genkey'. The SAN must match the advertised hostname, otherwise
# clients with hostname verification enabled will reject the certificate.
keytool -genkeypair \
  -alias kafka \
  -keyalg RSA \
  -keysize 2048 \
  -validity 3650 \
  -ext "SAN=DNS:<your.kafka.host>" \
  -keystore "$KAFKA_HOME/config/certs/kafka.server.keystore.jks"

# Export the broker certificate ('-exportcert' supersedes '-export').
keytool -exportcert \
  -alias kafka \
  -file "$KAFKA_HOME/config/certs/kafka.server.crt" \
  -keystore "$KAFKA_HOME/config/certs/kafka.server.keystore.jks"

# Import it into the truststore ('-importcert' supersedes '-import').
# Run this on every broker and every client; -noprompt skips the
# interactive "trust this certificate?" question.
keytool -importcert \
  -alias kafka \
  -noprompt \
  -file "$KAFKA_HOME/config/certs/kafka.server.crt" \
  -keystore "$KAFKA_HOME/config/certs/kafka.server.truststore.jks"
三 配置 Broker 安全参数
# Listener and externally advertised address (fill in the real hostname/IP).
listeners=SASL_SSL://0.0.0.0:9093
advertised.listeners=SASL_SSL://<your.kafka.host>:9093
# Inter-broker traffic also uses SASL over TLS with SCRAM-SHA-512 credentials.
security.inter.broker.protocol=SASL_SSL
sasl.mechanism.inter.broker.protocol=SCRAM-SHA-512
sasl.enabled.mechanisms=SCRAM-SHA-512
# TLS key material — paths and passwords must match the keytool-generated stores.
ssl.keystore.location=/opt/kafka/config/certs/kafka.server.keystore.jks
ssl.keystore.password=YourKeystorePass
ssl.key.password=YourKeyPass
ssl.truststore.location=/opt/kafka/config/certs/kafka.server.truststore.jks
ssl.truststore.password=YourTruststorePass
# 'required' enforces mutual TLS: every client must present a certificate
# the broker truststore accepts, in addition to SASL authentication.
ssl.client.auth=required
ssl.enabled.protocols=TLSv1.2,TLSv1.3
# Optional: restrict the allowed cipher suites.
# ssl.cipher.suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384
# ACL authorizer. NOTE(review): this class is for ZooKeeper-mode clusters;
# KRaft clusters use org.apache.kafka.metadata.authorizer.StandardAuthorizer —
# confirm which mode this deployment runs.
authorizer.class.name=kafka.security.authorizer.AclAuthorizer
# Super users bypass all ACL checks — grant sparingly (ops/bootstrap only).
super.users=User:admin
四 配置 JAAS 与用户凭证
// Broker-side JAAS: the credentials the broker itself presents for SCRAM
// inter-broker authentication (must match a provisioned SCRAM user).
KafkaServer {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="AdminPass!";
};
// Only needed when running with ZooKeeper: identity for the ZK connection.
// NOTE(review): JAAS login files take // comments rather than # — confirm
// against the JAAS config parser in use.
Client {
org.apache.zookeeper.server.auth.DigestLoginModule required
username="zkadmin"
password="ZkPass!";
};
# Point the broker JVM at the JAAS file before starting Kafka.
export KAFKA_OPTS="-Djava.security.auth.login.config=$KAFKA_HOME/config/kafka_server_jaas.conf"

# Cluster bootstrap address. kafka-configs.sh --alter cannot run without a
# connection argument (--bootstrap-server, or --zookeeper on legacy ZK-mode
# clusters) — the original commands omitted it entirely.
BOOTSTRAP=<your.kafka.host>:9093

# NOTE(review): against a SASL_SSL-only listener these commands also need
# --command-config with admin client credentials; in ZooKeeper mode the very
# first admin user must be created via --zookeeper before brokers start,
# since no SCRAM credential exists yet to authenticate with.

# Create the admin user (skip if already bootstrapped, or reuse to rotate
# the password).
"$KAFKA_HOME/bin/kafka-configs.sh" --alter \
  --bootstrap-server "$BOOTSTRAP" \
  --add-config 'SCRAM-SHA-512=[iterations=8192,password=AdminPass!]' \
  --entity-type users --entity-name admin

# Create the business users. The closing ']' was missing from both of the
# original --add-config values, which makes the command fail to parse.
"$KAFKA_HOME/bin/kafka-configs.sh" --alter \
  --bootstrap-server "$BOOTSTRAP" \
  --add-config 'SCRAM-SHA-512=[iterations=8192,password=WriterPass!]' \
  --entity-type users --entity-name writer

"$KAFKA_HOME/bin/kafka-configs.sh" --alter \
  --bootstrap-server "$BOOTSTRAP" \
  --add-config 'SCRAM-SHA-512=[iterations=8192,password=ReaderPass!]' \
  --entity-type users --entity-name reader
五 客户端与 ACL 验证及网络加固
# Client security settings (use in producer.properties / consumer.properties).
security.protocol=SASL_SSL
sasl.mechanism=SCRAM-SHA-512
sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required \
username="writer" \
password="WriterPass!";
ssl.truststore.location=/opt/kafka/config/certs/kafka.server.truststore.jks
ssl.truststore.password=YourTruststorePass
# With mutual TLS (broker ssl.client.auth=required) also configure the
# client-side keystore:
# ssl.keystore.location=/path/to/client.keystore.jks
# ssl.keystore.password=YourClientKeystorePass
# ssl.key.password=YourClientKeyPass
# List topics using the SCRAM credentials. The bash process substitution
# <(...) feeds an inline client config file to --command-config.
$KAFKA_HOME/bin/kafka-topics.sh --list \
--bootstrap-server <your.kafka.host>:9093 \
--command-config <(echo -e "security.protocol=SASL_SSL\nsasl.mechanism=SCRAM-SHA-512\nsasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username=\"writer\" password=\"WriterPass!\";")
# Produce messages (producer.properties holds the client security settings).
$KAFKA_HOME/bin/kafka-console-producer.sh --topic test-topic \
--bootstrap-server <your.kafka.host>:9093 \
--producer.config producer.properties
# Consume messages from the beginning of the topic.
$KAFKA_HOME/bin/kafka-console-consumer.sh --topic test-topic \
--from-beginning \
--bootstrap-server <your.kafka.host>:9093 \
--consumer.config consumer.properties
# Admin client config with SCRAM credentials — the ACL commands themselves
# must authenticate against the secured listener, so --command-config is
# required here.
ADMIN_CONFIG=$KAFKA_HOME/config/admin.properties

# Grant write access. kafka-acls.sh takes --allow-principal / --allow-host;
# the --principal / --host flags used originally do not exist. Host rules
# accept a single exact IP or '*', not a CIDR range.
"$KAFKA_HOME/bin/kafka-acls.sh" --add \
  --topic test-topic \
  --allow-principal User:writer \
  --operation Write --operation Create \
  --allow-host <client.ip> \
  --bootstrap-server <your.kafka.host>:9093 \
  --command-config "$ADMIN_CONFIG"

# Grant read access (consumers also need Read on their consumer group,
# hence the --group resource).
"$KAFKA_HOME/bin/kafka-acls.sh" --add \
  --topic test-topic \
  --allow-principal User:reader \
  --operation Read --operation Describe \
  --group '*' \
  --allow-host <client.ip> \
  --bootstrap-server <your.kafka.host>:9093 \
  --command-config "$ADMIN_CONFIG"

# Inspect the ACLs on the topic.
"$KAFKA_HOME/bin/kafka-acls.sh" --list \
  --topic test-topic \
  --bootstrap-server <your.kafka.host>:9093 \
  --command-config "$ADMIN_CONFIG"
# Open the TLS listener port.
sudo ufw allow 9093/tcp
# Allow SSH before enabling the firewall — enabling ufw without an SSH rule
# locks you out of a remote host.
sudo ufw allow OpenSSH
sudo ufw enable
# In cloud environments, also restrict security groups to trusted networks only.