mirror of https://github.com/didi/KnowStreaming.git
Add km module kafka
tests/kafkatest/services/security/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

tests/kafkatest/services/security/kafka_acls.py (new file, 75 lines)
@@ -0,0 +1,75 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin


class ACLs(KafkaPathResolverMixin):
    def __init__(self, context):
        self.context = context

    def set_acls(self, protocol, kafka, topic, group):
        node = kafka.nodes[0]
        setting = kafka.zk_connect_setting()

        # Set server ACLs
        kafka_principal = "User:CN=systemtest" if protocol == "SSL" else "User:kafka"
        self.acls_command(node, ACLs.add_cluster_acl(setting, kafka_principal))
        self.acls_command(node, ACLs.broker_read_acl(setting, "*", kafka_principal))

        # Set client ACLs
        client_principal = "User:CN=systemtest" if protocol == "SSL" else "User:client"
        self.acls_command(node, ACLs.produce_acl(setting, topic, client_principal))
        self.acls_command(node, ACLs.consume_acl(setting, topic, group, client_principal))

    def acls_command(self, node, properties):
        cmd = "%s %s" % (self.path.script("kafka-acls.sh", node), properties)
        node.account.ssh(cmd)

    @staticmethod
    def add_cluster_acl(zk_connect, principal="User:kafka"):
        return "--authorizer-properties zookeeper.connect=%(zk_connect)s --add --cluster " \
               "--operation=ClusterAction --allow-principal=%(principal)s " % {
                   'zk_connect': zk_connect,
                   'principal': principal
               }

    @staticmethod
    def broker_read_acl(zk_connect, topic, principal="User:kafka"):
        return "--authorizer-properties zookeeper.connect=%(zk_connect)s --add --topic=%(topic)s " \
               "--operation=Read --allow-principal=%(principal)s " % {
                   'zk_connect': zk_connect,
                   'topic': topic,
                   'principal': principal
               }

    @staticmethod
    def produce_acl(zk_connect, topic, principal="User:client"):
        return "--authorizer-properties zookeeper.connect=%(zk_connect)s --add --topic=%(topic)s " \
               "--producer --allow-principal=%(principal)s " % {
                   'zk_connect': zk_connect,
                   'topic': topic,
                   'principal': principal
               }

    @staticmethod
    def consume_acl(zk_connect, topic, group, principal="User:client"):
        return "--authorizer-properties zookeeper.connect=%(zk_connect)s --add --topic=%(topic)s " \
               "--group=%(group)s --consumer --allow-principal=%(principal)s " % {
                   'zk_connect': zk_connect,
                   'topic': topic,
                   'group': group,
                   'principal': principal
               }
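
For orientation, a minimal usage sketch of the helper above (editor's example, not part of the commit): `test_context` and `kafka` are assumed to be the ducktape test context and a started KafkaService from the kafkatest framework, and the topic/group names are placeholders.

# Editor's sketch (not in the diff): assumes a ducktape `test_context` and a
# running KafkaService `kafka` with a topic named "test_topic".
from kafkatest.services.security.kafka_acls import ACLs

acls = ACLs(test_context)
# Runs kafka-acls.sh on the first broker node: grants ClusterAction and Read
# to the broker principal, and produce/consume rights on the topic and group
# to the client principal ("User:CN=systemtest" because the protocol is SSL).
acls.set_acls("SSL", kafka, "test_topic", "test_group")
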
tests/kafkatest/services/security/listener_security_config.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

class ListenerSecurityConfig:

    SASL_MECHANISM_PREFIXED_CONFIGS = ["connections.max.reauth.ms", "sasl.jaas.config",
                                       "sasl.login.callback.handler.class", "sasl.login.class",
                                       "sasl.server.callback.handler.class"]

    def __init__(self, use_separate_interbroker_listener=False,
                 client_listener_overrides={}, interbroker_listener_overrides={}):
        """
        :param bool use_separate_interbroker_listener - if set, a separate interbroker listener is used,
            with its security protocol taken from interbroker_security_protocol, which must then be provided.
            Normally a port's name is the same as its security protocol, so setting security_protocol and
            interbroker_security_protocol to the same value leads to a single open port carrying both client
            and broker-to-broker traffic. This parameter lets you add an interbroker listener with the same
            security protocol as a client listener, but running on a separate port.
        :param dict client_listener_overrides - non-prefixed listener config overrides for the named client listener
            (for example 'sasl.jaas.config', 'ssl.keystore.location', 'sasl.login.callback.handler.class', etc.).
        :param dict interbroker_listener_overrides - non-prefixed listener config overrides for the named interbroker
            listener (for example 'sasl.jaas.config', 'ssl.keystore.location', 'sasl.login.callback.handler.class', etc.).
        """
        self.use_separate_interbroker_listener = use_separate_interbroker_listener
        self.client_listener_overrides = client_listener_overrides
        self.interbroker_listener_overrides = interbroker_listener_overrides

    def requires_sasl_mechanism_prefix(self, config):
        return config in ListenerSecurityConfig.SASL_MECHANISM_PREFIXED_CONFIGS
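
As a quick illustration of how these options compose, the sketch below (editor's example, not part of the commit) builds a config with a separate interbroker listener and one client-side override; the JAAS value is a placeholder.

# Editor's sketch (not in the diff); the override value is a placeholder.
from kafkatest.services.security.listener_security_config import ListenerSecurityConfig

listener_config = ListenerSecurityConfig(
    use_separate_interbroker_listener=True,
    client_listener_overrides={"sasl.jaas.config": "<client JAAS entry>"})

# SecurityConfig (below) merges client_listener_overrides into its properties,
# and 'sasl.jaas.config' is one of the configs that needs a SASL mechanism
# prefix when written as a per-listener broker property.
assert listener_config.requires_sasl_mechanism_prefix("sasl.jaas.config")
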
tests/kafkatest/services/security/minikdc.py (new file, 136 lines)
@@ -0,0 +1,136 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import random
import uuid
from io import open
from os import remove, close
from shutil import move
from tempfile import mkstemp

from ducktape.services.service import Service

from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin, CORE_LIBS_JAR_NAME, CORE_DEPENDANT_TEST_LIBS_JAR_NAME
from kafkatest.version import DEV_BRANCH


class MiniKdc(KafkaPathResolverMixin, Service):

    logs = {
        "minikdc_log": {
            "path": "/mnt/minikdc/minikdc.log",
            "collect_default": True}
    }

    WORK_DIR = "/mnt/minikdc"
    PROPS_FILE = "/mnt/minikdc/minikdc.properties"
    KEYTAB_FILE = "/mnt/minikdc/keytab"
    KRB5CONF_FILE = "/mnt/minikdc/krb5.conf"
    LOG_FILE = "/mnt/minikdc/minikdc.log"

    LOCAL_KEYTAB_FILE = None
    LOCAL_KRB5CONF_FILE = None

    @staticmethod
    def _set_local_keytab_file(local_scratch_dir):
        """Set MiniKdc.LOCAL_KEYTAB_FILE exactly once per test.

        LOCAL_KEYTAB_FILE is currently used like a global variable to provide a mechanism to share the
        location of the local keytab file among all services which might need it.

        Since individual ducktape tests are each run in a subprocess forked from the ducktape main process,
        class variables set at class load time are duplicated between test processes. This leads to collisions
        if test subprocesses run in parallel, so we defer setting these class variables until after the test itself
        begins to run.
        """
        if MiniKdc.LOCAL_KEYTAB_FILE is None:
            MiniKdc.LOCAL_KEYTAB_FILE = os.path.join(local_scratch_dir, "keytab")
        return MiniKdc.LOCAL_KEYTAB_FILE

    @staticmethod
    def _set_local_krb5conf_file(local_scratch_dir):
        """Set MiniKdc.LOCAL_KRB5CONF_FILE exactly once per test.

        See _set_local_keytab_file for details on why we do this.
        """

        if MiniKdc.LOCAL_KRB5CONF_FILE is None:
            MiniKdc.LOCAL_KRB5CONF_FILE = os.path.join(local_scratch_dir, "krb5conf")
        return MiniKdc.LOCAL_KRB5CONF_FILE

    def __init__(self, context, kafka_nodes, extra_principals=""):
        super(MiniKdc, self).__init__(context, 1)
        self.kafka_nodes = kafka_nodes
        self.extra_principals = extra_principals

        # context.local_scratch_dir uses a ducktape feature:
        # each test_context object has a unique local scratch directory which is available for the duration
        # of the test and is automatically garbage collected after the test finishes
        MiniKdc._set_local_keytab_file(context.local_scratch_dir)
        MiniKdc._set_local_krb5conf_file(context.local_scratch_dir)

    def replace_in_file(self, file_path, pattern, subst):
        fh, abs_path = mkstemp()
        with open(abs_path, 'w') as new_file:
            with open(file_path) as old_file:
                for line in old_file:
                    new_file.write(line.replace(pattern, subst))
        close(fh)
        remove(file_path)
        move(abs_path, file_path)

    def start_node(self, node):
        node.account.ssh("mkdir -p %s" % MiniKdc.WORK_DIR, allow_fail=False)
        props_file = self.render('minikdc.properties', node=node)
        node.account.create_file(MiniKdc.PROPS_FILE, props_file)
        self.logger.info("minikdc.properties")
        self.logger.info(props_file)

        kafka_principals = ' '.join(['kafka/' + kafka_node.account.hostname for kafka_node in self.kafka_nodes])
        principals = 'client ' + kafka_principals + ' ' + self.extra_principals
        self.logger.info("Starting MiniKdc with principals " + principals)

        core_libs_jar = self.path.jar(CORE_LIBS_JAR_NAME, DEV_BRANCH)
        core_dependant_test_libs_jar = self.path.jar(CORE_DEPENDANT_TEST_LIBS_JAR_NAME, DEV_BRANCH)

        cmd = "for file in %s; do CLASSPATH=$CLASSPATH:$file; done;" % core_libs_jar
        cmd += " for file in %s; do CLASSPATH=$CLASSPATH:$file; done;" % core_dependant_test_libs_jar
        cmd += " export CLASSPATH;"
        cmd += " %s kafka.security.minikdc.MiniKdc %s %s %s %s 1>> %s 2>> %s &" % (self.path.script("kafka-run-class.sh", node), MiniKdc.WORK_DIR, MiniKdc.PROPS_FILE, MiniKdc.KEYTAB_FILE, principals, MiniKdc.LOG_FILE, MiniKdc.LOG_FILE)
        self.logger.debug("Attempting to start MiniKdc on %s with command: %s" % (str(node.account), cmd))
        with node.account.monitor_log(MiniKdc.LOG_FILE) as monitor:
            node.account.ssh(cmd)
            monitor.wait_until("MiniKdc Running", timeout_sec=60, backoff_sec=1, err_msg="MiniKdc didn't finish startup")

        node.account.copy_from(MiniKdc.KEYTAB_FILE, MiniKdc.LOCAL_KEYTAB_FILE)
        node.account.copy_from(MiniKdc.KRB5CONF_FILE, MiniKdc.LOCAL_KRB5CONF_FILE)

        # The KDC is set to bind openly (via 0.0.0.0). Change krb5.conf to hold the specific KDC address.
        self.replace_in_file(MiniKdc.LOCAL_KRB5CONF_FILE, '0.0.0.0', node.account.hostname)

    def stop_node(self, node):
        self.logger.info("Stopping %s on %s" % (type(self).__name__, node.account.hostname))
        node.account.kill_java_processes("MiniKdc", clean_shutdown=True, allow_fail=False)

    def clean_node(self, node):
        node.account.kill_java_processes("MiniKdc", clean_shutdown=False, allow_fail=True)
        node.account.ssh("rm -rf " + MiniKdc.WORK_DIR, allow_fail=False)
        if os.path.exists(MiniKdc.LOCAL_KEYTAB_FILE):
            os.remove(MiniKdc.LOCAL_KEYTAB_FILE)
        if os.path.exists(MiniKdc.LOCAL_KRB5CONF_FILE):
            os.remove(MiniKdc.LOCAL_KRB5CONF_FILE)
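
For context, a hedged sketch of how a test might stand up this service in front of Kerberos-enabled brokers (editor's example, not part of the commit; in practice the KafkaService typically creates the MiniKdc itself, and `test_context` and `kafka` come from the kafkatest framework).

# Editor's sketch (not in the diff): start a single-node KDC that knows about
# the broker principals plus one extra principal for a console client.
from kafkatest.services.security.minikdc import MiniKdc

kdc = MiniKdc(test_context, kafka.nodes, extra_principals="client2")
kdc.start()  # renders minikdc.properties, launches the JVM, waits for "MiniKdc Running"

# After start_node() runs, the keytab and krb5.conf have been copied back to the
# local scratch dir (MiniKdc.LOCAL_KEYTAB_FILE / LOCAL_KRB5CONF_FILE) so that
# SecurityConfig.setup_sasl() can push them to every service node.
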
tests/kafkatest/services/security/security_config.py (new file, 352 lines)
@@ -0,0 +1,352 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
import subprocess
from tempfile import mkdtemp
from shutil import rmtree
from ducktape.template import TemplateRenderer
from kafkatest.services.security.minikdc import MiniKdc
from kafkatest.services.security.listener_security_config import ListenerSecurityConfig
import itertools


class SslStores(object):
    def __init__(self, local_scratch_dir, logger=None):
        self.logger = logger
        self.ca_crt_path = os.path.join(local_scratch_dir, "test.ca.crt")
        self.ca_jks_path = os.path.join(local_scratch_dir, "test.ca.jks")
        self.ca_passwd = "test-ca-passwd"

        self.truststore_path = os.path.join(local_scratch_dir, "test.truststore.jks")
        self.truststore_passwd = "test-ts-passwd"
        self.keystore_passwd = "test-ks-passwd"
        # ZooKeeper TLS (as of v3.5.6) does not support a key password different from the keystore password
        self.key_passwd = self.keystore_passwd
        # Allow up to one hour of clock skew between host and VMs
        self.startdate = "-1H"

        for file in [self.ca_crt_path, self.ca_jks_path, self.truststore_path]:
            if os.path.exists(file):
                os.remove(file)

    def generate_ca(self):
        """
        Generate CA private key and certificate.
        """

        self.runcmd("keytool -genkeypair -alias ca -keyalg RSA -keysize 2048 -keystore %s -storetype JKS -storepass %s -keypass %s -dname CN=SystemTestCA -startdate %s --ext bc=ca:true" % (self.ca_jks_path, self.ca_passwd, self.ca_passwd, self.startdate))
        self.runcmd("keytool -export -alias ca -keystore %s -storepass %s -storetype JKS -rfc -file %s" % (self.ca_jks_path, self.ca_passwd, self.ca_crt_path))

    def generate_truststore(self):
        """
        Generate JKS truststore containing CA certificate.
        """

        self.runcmd("keytool -importcert -alias ca -file %s -keystore %s -storepass %s -storetype JKS -noprompt" % (self.ca_crt_path, self.truststore_path, self.truststore_passwd))

    def generate_and_copy_keystore(self, node):
        """
        Generate JKS keystore with certificate signed by the test CA.
        The generated certificate has the node's hostname as a DNS SubjectAlternativeName.
        """

        ks_dir = mkdtemp(dir="/tmp")
        ks_path = os.path.join(ks_dir, "test.keystore.jks")
        csr_path = os.path.join(ks_dir, "test.kafka.csr")
        crt_path = os.path.join(ks_dir, "test.kafka.crt")

        self.runcmd("keytool -genkeypair -alias kafka -keyalg RSA -keysize 2048 -keystore %s -storepass %s -storetype JKS -keypass %s -dname CN=systemtest -ext SAN=DNS:%s -startdate %s" % (ks_path, self.keystore_passwd, self.key_passwd, self.hostname(node), self.startdate))
        self.runcmd("keytool -certreq -keystore %s -storepass %s -storetype JKS -keypass %s -alias kafka -file %s" % (ks_path, self.keystore_passwd, self.key_passwd, csr_path))
        self.runcmd("keytool -gencert -keystore %s -storepass %s -storetype JKS -alias ca -infile %s -outfile %s -dname CN=systemtest -ext SAN=DNS:%s -startdate %s" % (self.ca_jks_path, self.ca_passwd, csr_path, crt_path, self.hostname(node), self.startdate))
        self.runcmd("keytool -importcert -keystore %s -storepass %s -storetype JKS -alias ca -file %s -noprompt" % (ks_path, self.keystore_passwd, self.ca_crt_path))
        self.runcmd("keytool -importcert -keystore %s -storepass %s -storetype JKS -keypass %s -alias kafka -file %s -noprompt" % (ks_path, self.keystore_passwd, self.key_passwd, crt_path))
        node.account.copy_to(ks_path, SecurityConfig.KEYSTORE_PATH)

        # generate ZooKeeper client TLS config file for the encryption-only (no client cert) use case
        str = """zookeeper.clientCnxnSocket=org.apache.zookeeper.ClientCnxnSocketNetty
zookeeper.ssl.client.enable=true
zookeeper.ssl.truststore.location=%s
zookeeper.ssl.truststore.password=%s
""" % (SecurityConfig.TRUSTSTORE_PATH, self.truststore_passwd)
        node.account.create_file(SecurityConfig.ZK_CLIENT_TLS_ENCRYPT_ONLY_CONFIG_PATH, str)

        # also generate ZooKeeper client TLS config file for the mutual authentication use case
        str = """zookeeper.clientCnxnSocket=org.apache.zookeeper.ClientCnxnSocketNetty
zookeeper.ssl.client.enable=true
zookeeper.ssl.truststore.location=%s
zookeeper.ssl.truststore.password=%s
zookeeper.ssl.keystore.location=%s
zookeeper.ssl.keystore.password=%s
""" % (SecurityConfig.TRUSTSTORE_PATH, self.truststore_passwd, SecurityConfig.KEYSTORE_PATH, self.keystore_passwd)
        node.account.create_file(SecurityConfig.ZK_CLIENT_MUTUAL_AUTH_CONFIG_PATH, str)

        rmtree(ks_dir)

    def hostname(self, node):
        """ Hostname, which may be overridden for testing validation failures
        """
        return node.account.hostname

    def runcmd(self, cmd):
        if self.logger:
            self.logger.log(logging.DEBUG, cmd)
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout, stderr = proc.communicate()

        if proc.returncode != 0:
            raise RuntimeError("Command '%s' returned non-zero exit status %d: %s" % (cmd, proc.returncode, stdout))


class SecurityConfig(TemplateRenderer):

    PLAINTEXT = 'PLAINTEXT'
    SSL = 'SSL'
    SASL_PLAINTEXT = 'SASL_PLAINTEXT'
    SASL_SSL = 'SASL_SSL'
    SASL_MECHANISM_GSSAPI = 'GSSAPI'
    SASL_MECHANISM_PLAIN = 'PLAIN'
    SASL_MECHANISM_SCRAM_SHA_256 = 'SCRAM-SHA-256'
    SASL_MECHANISM_SCRAM_SHA_512 = 'SCRAM-SHA-512'
    SCRAM_CLIENT_USER = "kafka-client"
    SCRAM_CLIENT_PASSWORD = "client-secret"
    SCRAM_BROKER_USER = "kafka-broker"
    SCRAM_BROKER_PASSWORD = "broker-secret"
    CONFIG_DIR = "/mnt/security"
    KEYSTORE_PATH = "/mnt/security/test.keystore.jks"
    TRUSTSTORE_PATH = "/mnt/security/test.truststore.jks"
    ZK_CLIENT_TLS_ENCRYPT_ONLY_CONFIG_PATH = "/mnt/security/zk_client_tls_encrypt_only_config.properties"
    ZK_CLIENT_MUTUAL_AUTH_CONFIG_PATH = "/mnt/security/zk_client_mutual_auth_config.properties"
    JAAS_CONF_PATH = "/mnt/security/jaas.conf"
    KRB5CONF_PATH = "/mnt/security/krb5.conf"
    KEYTAB_PATH = "/mnt/security/keytab"

    # This is initialized only when the first instance of SecurityConfig is created
    ssl_stores = None

    def __init__(self, context, security_protocol=None, interbroker_security_protocol=None,
                 client_sasl_mechanism=SASL_MECHANISM_GSSAPI, interbroker_sasl_mechanism=SASL_MECHANISM_GSSAPI,
                 zk_sasl=False, zk_tls=False, template_props="", static_jaas_conf=True, jaas_override_variables=None,
                 listener_security_config=ListenerSecurityConfig()):
        """
        Initialize the security properties for the node and copy
        keystore and truststore to the remote node if the transport protocol
        is SSL. If security_protocol is None, the protocol specified in the
        template properties file is used. If no protocol is specified in the
        template properties either, PLAINTEXT is used as the default.
        """

        self.context = context
        if not SecurityConfig.ssl_stores:
            # This generates keystore/truststore files in a local scratch directory which gets
            # automatically destroyed after the test is run.
            # Creating them within the scratch directory allows us to run tests in parallel without fear of collision.
            SecurityConfig.ssl_stores = SslStores(context.local_scratch_dir, context.logger)
            SecurityConfig.ssl_stores.generate_ca()
            SecurityConfig.ssl_stores.generate_truststore()

        if security_protocol is None:
            security_protocol = self.get_property('security.protocol', template_props)
        if security_protocol is None:
            security_protocol = SecurityConfig.PLAINTEXT
        elif security_protocol not in [SecurityConfig.PLAINTEXT, SecurityConfig.SSL, SecurityConfig.SASL_PLAINTEXT, SecurityConfig.SASL_SSL]:
            raise Exception("Invalid security.protocol in template properties: " + security_protocol)

        if interbroker_security_protocol is None:
            interbroker_security_protocol = security_protocol
        self.interbroker_security_protocol = interbroker_security_protocol
        self.has_sasl = self.is_sasl(security_protocol) or self.is_sasl(interbroker_security_protocol) or zk_sasl
        self.has_ssl = self.is_ssl(security_protocol) or self.is_ssl(interbroker_security_protocol) or zk_tls
        self.zk_sasl = zk_sasl
        self.zk_tls = zk_tls
        self.static_jaas_conf = static_jaas_conf
        self.listener_security_config = listener_security_config
        self.properties = {
            'security.protocol' : security_protocol,
            'ssl.keystore.location' : SecurityConfig.KEYSTORE_PATH,
            'ssl.keystore.password' : SecurityConfig.ssl_stores.keystore_passwd,
            'ssl.key.password' : SecurityConfig.ssl_stores.key_passwd,
            'ssl.truststore.location' : SecurityConfig.TRUSTSTORE_PATH,
            'ssl.truststore.password' : SecurityConfig.ssl_stores.truststore_passwd,
            'ssl.endpoint.identification.algorithm' : 'HTTPS',
            'sasl.mechanism' : client_sasl_mechanism,
            'sasl.mechanism.inter.broker.protocol' : interbroker_sasl_mechanism,
            'sasl.kerberos.service.name' : 'kafka'
        }
        self.properties.update(self.listener_security_config.client_listener_overrides)
        self.jaas_override_variables = jaas_override_variables or {}

    def client_config(self, template_props="", node=None, jaas_override_variables=None):
        # If node is not specified, use static jaas config which will be created later.
        # Otherwise use static JAAS configuration files with SASL_SSL and the sasl.jaas.config
        # property with SASL_PLAINTEXT so that both code paths are tested by existing tests.
        # Note that this is an arbitrary choice and it is possible to run all tests with
        # either static or dynamic jaas config files if required.
        static_jaas_conf = node is None or (self.has_sasl and self.has_ssl)
        return SecurityConfig(self.context, self.security_protocol,
                              client_sasl_mechanism=self.client_sasl_mechanism,
                              template_props=template_props,
                              static_jaas_conf=static_jaas_conf,
                              jaas_override_variables=jaas_override_variables,
                              listener_security_config=self.listener_security_config)

    def enable_security_protocol(self, security_protocol):
        self.has_sasl = self.has_sasl or self.is_sasl(security_protocol)
        self.has_ssl = self.has_ssl or self.is_ssl(security_protocol)

    def setup_ssl(self, node):
        node.account.ssh("mkdir -p %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)
        node.account.copy_to(SecurityConfig.ssl_stores.truststore_path, SecurityConfig.TRUSTSTORE_PATH)
        SecurityConfig.ssl_stores.generate_and_copy_keystore(node)

    def setup_sasl(self, node):
        node.account.ssh("mkdir -p %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)
        jaas_conf_file = "jaas.conf"
        java_version = node.account.ssh_capture("java -version")

        jaas_conf = None
        if 'sasl.jaas.config' not in self.properties:
            jaas_conf = self.render_jaas_config(
                jaas_conf_file,
                {
                    'node': node,
                    'is_ibm_jdk': any('IBM' in line for line in java_version),
                    'SecurityConfig': SecurityConfig,
                    'client_sasl_mechanism': self.client_sasl_mechanism,
                    'enabled_sasl_mechanisms': self.enabled_sasl_mechanisms
                }
            )
        else:
            jaas_conf = self.properties['sasl.jaas.config']

        if self.static_jaas_conf:
            node.account.create_file(SecurityConfig.JAAS_CONF_PATH, jaas_conf)
        elif 'sasl.jaas.config' not in self.properties:
            self.properties['sasl.jaas.config'] = jaas_conf.replace("\n", " \\\n")
        if self.has_sasl_kerberos:
            node.account.copy_to(MiniKdc.LOCAL_KEYTAB_FILE, SecurityConfig.KEYTAB_PATH)
            node.account.copy_to(MiniKdc.LOCAL_KRB5CONF_FILE, SecurityConfig.KRB5CONF_PATH)

    def render_jaas_config(self, jaas_conf_file, config_variables):
        """
        Renders the JAAS config file contents.

        :param jaas_conf_file: name of the JAAS config template file
        :param config_variables: dict of variables used in the template
        :return: the rendered template string
        """
        variables = config_variables.copy()
        variables.update(self.jaas_override_variables)  # override variables
        return self.render(jaas_conf_file, **variables)

    def setup_node(self, node):
        if self.has_ssl:
            self.setup_ssl(node)

        if self.has_sasl:
            self.setup_sasl(node)

    def setup_credentials(self, node, path, zk_connect, broker):
        if broker:
            self.maybe_create_scram_credentials(node, zk_connect, path, self.interbroker_sasl_mechanism,
                                                SecurityConfig.SCRAM_BROKER_USER, SecurityConfig.SCRAM_BROKER_PASSWORD)
        else:
            self.maybe_create_scram_credentials(node, zk_connect, path, self.client_sasl_mechanism,
                                                SecurityConfig.SCRAM_CLIENT_USER, SecurityConfig.SCRAM_CLIENT_PASSWORD)

    def maybe_create_scram_credentials(self, node, zk_connect, path, mechanism, user_name, password):
        if self.has_sasl and self.is_sasl_scram(mechanism):
            cmd = "%s --zookeeper %s --entity-name %s --entity-type users --alter --add-config %s=[password=%s]" % \
                  (path.script("kafka-configs.sh", node), zk_connect,
                   user_name, mechanism, password)
            node.account.ssh(cmd)

    def clean_node(self, node):
        if self.security_protocol != SecurityConfig.PLAINTEXT:
            node.account.ssh("rm -rf %s" % SecurityConfig.CONFIG_DIR, allow_fail=False)

    def get_property(self, prop_name, template_props=""):
        """
        Get a property value from the string representation of
        a properties file.
        """
        value = None
        for line in template_props.split("\n"):
            items = line.split("=")
            if len(items) == 2 and items[0].strip() == prop_name:
                value = str(items[1].strip())
        return value

    def is_ssl(self, security_protocol):
        return security_protocol == SecurityConfig.SSL or security_protocol == SecurityConfig.SASL_SSL

    def is_sasl(self, security_protocol):
        return security_protocol == SecurityConfig.SASL_PLAINTEXT or security_protocol == SecurityConfig.SASL_SSL

    def is_sasl_scram(self, sasl_mechanism):
        return sasl_mechanism == SecurityConfig.SASL_MECHANISM_SCRAM_SHA_256 or sasl_mechanism == SecurityConfig.SASL_MECHANISM_SCRAM_SHA_512

    @property
    def security_protocol(self):
        return self.properties['security.protocol']

    @property
    def client_sasl_mechanism(self):
        return self.properties['sasl.mechanism']

    @property
    def interbroker_sasl_mechanism(self):
        return self.properties['sasl.mechanism.inter.broker.protocol']

    @property
    def enabled_sasl_mechanisms(self):
        return set([self.client_sasl_mechanism, self.interbroker_sasl_mechanism])

    @property
    def has_sasl_kerberos(self):
        return self.has_sasl and (SecurityConfig.SASL_MECHANISM_GSSAPI in self.enabled_sasl_mechanisms)

    @property
    def kafka_opts(self):
        if self.has_sasl:
            if self.static_jaas_conf:
                return "\"-Djava.security.auth.login.config=%s -Djava.security.krb5.conf=%s\"" % (SecurityConfig.JAAS_CONF_PATH, SecurityConfig.KRB5CONF_PATH)
            else:
                return "\"-Djava.security.krb5.conf=%s\"" % SecurityConfig.KRB5CONF_PATH
        else:
            return ""

    def props(self, prefix=''):
        """
        Return properties as a string with line separators, optionally with a prefix.
        This is used to append security config properties to
        a properties file.
        :param prefix: prefix to add to each property
        :return: a string containing line-separated properties
        """
        if self.security_protocol == SecurityConfig.PLAINTEXT:
            return ""
        if self.has_sasl and not self.static_jaas_conf and 'sasl.jaas.config' not in self.properties:
            raise Exception("JAAS configuration property has not yet been initialized")
        config_lines = (prefix + key + "=" + value for key, value in self.properties.iteritems())
        # Extra blank lines ensure this can be appended/prepended safely
        return "\n".join(itertools.chain([""], config_lines, [""]))

    def __str__(self):
        """
        Return properties as a string with line separators.
        """
        return self.props()
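
To make the construction path concrete, a short editor's sketch follows (not part of the commit). It assumes a ducktape `test_context` and that `keytool` is on the PATH, since the first SecurityConfig instance generates the shared CA and truststore.

# Editor's sketch (not in the diff).
from kafkatest.services.security.security_config import SecurityConfig

security = SecurityConfig(test_context,
                          security_protocol=SecurityConfig.SASL_SSL,
                          client_sasl_mechanism=SecurityConfig.SASL_MECHANISM_SCRAM_SHA_256)

# props() renders the properties dict as "key=value" lines ready to be appended
# to a server.properties/client.properties file; kafka_opts supplies the JAAS
# and krb5 JVM flags whenever SASL is enabled.
server_props_fragment = security.props()
jvm_flags = security.kafka_opts
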
tests/kafkatest/services/security/templates/jaas.conf (new file, 108 lines)
@@ -0,0 +1,108 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
 * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
 * to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */


{% if static_jaas_conf %}
KafkaClient {
{% endif %}
{% if "GSSAPI" in client_sasl_mechanism %}
{% if is_ibm_jdk %}
    com.ibm.security.auth.module.Krb5LoginModule required debug=false
    credsType=both
    useKeytab="file:/mnt/security/keytab"
    principal="client@EXAMPLE.COM";
{% else %}
    com.sun.security.auth.module.Krb5LoginModule required debug=false
    doNotPrompt=true
    useKeyTab=true
    storeKey=true
    keyTab="/mnt/security/keytab"
    principal="client@EXAMPLE.COM";
{% endif %}
{% elif client_sasl_mechanism == "PLAIN" %}
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="client"
    password="client-secret";
{% elif "SCRAM-SHA-256" in client_sasl_mechanism or "SCRAM-SHA-512" in client_sasl_mechanism %}
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="{{ SecurityConfig.SCRAM_CLIENT_USER }}"
    password="{{ SecurityConfig.SCRAM_CLIENT_PASSWORD }}";
{% endif %}

{% if static_jaas_conf %}
};

KafkaServer {
{% if "GSSAPI" in enabled_sasl_mechanisms %}
{% if is_ibm_jdk %}
    com.ibm.security.auth.module.Krb5LoginModule required debug=false
    credsType=both
    useKeytab="file:/mnt/security/keytab"
    principal="kafka/{{ node.account.hostname }}@EXAMPLE.COM";
{% else %}
    com.sun.security.auth.module.Krb5LoginModule required debug=false
    doNotPrompt=true
    useKeyTab=true
    storeKey=true
    keyTab="/mnt/security/keytab"
    principal="kafka/{{ node.account.hostname }}@EXAMPLE.COM";
{% endif %}
{% endif %}
{% if "PLAIN" in enabled_sasl_mechanisms %}
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="kafka"
    password="kafka-secret"
    user_client="client-secret"
    user_kafka="kafka-secret";
{% endif %}
{% if "SCRAM-SHA-256" in client_sasl_mechanism or "SCRAM-SHA-512" in client_sasl_mechanism %}
    org.apache.kafka.common.security.scram.ScramLoginModule required
    username="{{ SecurityConfig.SCRAM_BROKER_USER }}"
    password="{{ SecurityConfig.SCRAM_BROKER_PASSWORD }}";
{% endif %}
};

{% if zk_sasl %}
Client {
{% if is_ibm_jdk %}
    com.ibm.security.auth.module.Krb5LoginModule required debug=false
    credsType=both
    useKeytab="file:/mnt/security/keytab"
    principal="zkclient@EXAMPLE.COM";
{% else %}
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="/mnt/security/keytab"
    storeKey=true
    useTicketCache=false
    principal="zkclient@EXAMPLE.COM";
{% endif %}
};

Server {
{% if is_ibm_jdk %}
    com.ibm.security.auth.module.Krb5LoginModule required debug=false
    credsType=both
    useKeyTab="file:/mnt/security/keytab"
    principal="zookeeper/{{ node.account.hostname }}@EXAMPLE.COM";
{% else %}
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="/mnt/security/keytab"
    storeKey=true
    useTicketCache=false
    principal="zookeeper/{{ node.account.hostname }}@EXAMPLE.COM";
{% endif %}
};
{% endif %}
{% endif %}
tests/kafkatest/services/security/templates/minikdc.properties (new file, 17 lines)
@@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

kdc.bind.address=0.0.0.0