commit db8e96147a (parent fb3a860d7f)
test_for_basic_auth_registry - started, but only auth test works
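
The changes below touch the Kafka docker-compose configuration (a second schema registry service, schema-registry-auth, protected by HTTP Basic auth) and the Avro Confluent integration test, where the authenticated variant (test_select_auth) is still commented out. As an illustration of the flow that test is meant to exercise, here is a minimal sketch that passes credentials in the registry URL. It assumes the confluent-kafka Avro client used by the test (CachedSchemaRegistryClient, MessageSerializer) and a registry reachable on localhost:12313; host and port are placeholders, not values fixed by this commit.

    # Sketch only: encode a few Avro records against a basic-auth schema registry.
    # Credentials (schemauser/letmein) and subject/record names mirror the
    # commented-out test_select_auth in the diff; host/port are assumptions.
    import io

    import avro.schema
    from confluent_kafka.avro.cached_schema_registry_client import CachedSchemaRegistryClient
    from confluent_kafka.avro.serializer.message_serializer import MessageSerializer

    # user:password embedded in the URL is picked up by the client for Basic auth
    reg_url = "http://{}:{}@{}:{}".format("schemauser", "letmein", "localhost", 12313)
    schema_registry_client = CachedSchemaRegistryClient({"url": reg_url})
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object(
        {
            "name": "test_record_auth",
            "type": "record",
            "fields": [{"name": "value", "type": "long"}],
        }
    )

    # Confluent wire-format payload; the test later feeds this to ClickHouse via
    # the AvroConfluent input format with format_avro_schema_registry_url.
    buf = io.BytesIO()
    for x in range(0, 3):
        buf.write(serializer.encode_record_with_schema("test_subject_auth", schema, {"value": x}))
    data = buf.getvalue()
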
@@ -1,55 +1,41 @@
 version: '2.3'
 
 services:
+#    kafka_zookeeper:
+#        image: zookeeper:3.4.9
+#        hostname: kafka_zookeeper
+#        environment:
+#            ZOO_MY_ID: 1
+#            ZOO_PORT: 2181
+#            ZOO_SERVERS: server.1=kafka_zookeeper:2888:3888
+#        security_opt:
+#            - label:disable
     kafka_zookeeper:
-        image: zookeeper:3.4.9
-        hostname: kafka_zookeeper
+        image: confluentinc/cp-zookeeper
+        ports:
+            - 2181:2181
         environment:
-            ZOO_MY_ID: 1
-            ZOO_PORT: 2181
-            ZOO_SERVERS: server.1=kafka_zookeeper:2888:3888
-        security_opt:
-            - label:disable
+            ZOOKEEPER_CLIENT_PORT: 2181
+#        security_opt:
+#            - label:disable
 
     kafka1:
-        image: confluentinc/cp-kafka:5.2.0
+        image: confluentinc/cp-kafka
         hostname: kafka1
         ports:
-            - ${KAFKA_EXTERNAL_PORT:-8081}:${KAFKA_EXTERNAL_PORT:-8081}
+            - ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT}
         environment:
-            KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:${KAFKA_EXTERNAL_PORT},OUTSIDE://kafka1:19092
+            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT, PLAINTEXT_HOST:PLAINTEXT
+#            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
+            KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:19092, PLAINTEXT_HOST://localhost:${KAFKA_EXTERNAL_PORT}
             KAFKA_ADVERTISED_HOST_NAME: kafka1
-            KAFKA_LISTENERS: INSIDE://0.0.0.0:${KAFKA_EXTERNAL_PORT},OUTSIDE://0.0.0.0:19092
-            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
-            KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
-            KAFKA_BROKER_ID: 1
-            KAFKA_ZOOKEEPER_CONNECT: "kafka_zookeeper:2181"
-            KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
+#            KAFKA_LISTENERS: INSIDE://0.0.0.0:${KAFKA_EXTERNAL_PORT},OUTSIDE://0.0.0.0:19092
+            KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+#            KAFKA_BROKER_ID: 1
+            KAFKA_ZOOKEEPER_CONNECT: kafka_zookeeper:2181
+#            KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
             KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
         depends_on:
             - kafka_zookeeper
-        security_opt:
-            - label:disable
 
-#    kafka2:
-#        image: confluentinc/cp-kafka:5.2.0
-#        hostname: kafka2
-#        ports:
-#            - ${KAFKA2_EXTERNAL_PORT:-8082}:${KAFKA2_EXTERNAL_PORT:-8082}
-#        environment:
-#            # KAFKA_EXTERNAL_PORT: ${KAFKA2_EXTERNAL_PORT}
-#            # KAFKA_HOST: ${KAFKA2_HOST}
-#            KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:${KAFKA2_EXTERNAL_PORT},OUTSIDE://kafka2:19093
-#            KAFKA_ADVERTISED_HOST_NAME: kafka2
-#            KAFKA_LISTENERS: INSIDE://0.0.0.0:${KAFKA_EXTERNAL_PORT},OUTSIDE://0.0.0.0:19093
-#            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
-#            KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
-#            KAFKA_BROKER_ID: 2
-#            KAFKA_ZOOKEEPER_CONNECT: "kafka_zookeeper:2181"
-#            KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
-#            KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-#        depends_on:
-#            - kafka_zookeeper
 #        security_opt:
 #            - label:disable
 
@@ -57,34 +43,38 @@ services:
         image: confluentinc/cp-schema-registry:5.2.0
         hostname: schema-registry
         ports:
-            - ${SCHEMA_REGISTRY_EXTERNAL_PORT:-12313}:${SCHEMA_REGISTRY_INTERNAL_PORT:-12313}
+            - ${SCHEMA_REGISTRY_EXTERNAL_PORT}:${SCHEMA_REGISTRY_EXTERNAL_PORT}
         environment:
             SCHEMA_REGISTRY_HOST_NAME: schema-registry
-            SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+#            SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
             SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
+            SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: kafka_zookeeper:2181
+            SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:${SCHEMA_REGISTRY_EXTERNAL_PORT:-12313}
         depends_on:
             - kafka_zookeeper
             - kafka1
-        security_opt:
-            - label:disable
+#        security_opt:
+#            - label:disable
 
     schema-registry-auth:
         image: confluentinc/cp-schema-registry:5.2.0
         hostname: schema-registry-auth
         ports:
-            - ${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT:-12313}:${SCHEMA_REGISTRY_INTERNAL_PORT:-12313}
+            - ${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
         environment:
-            SCHEMA_REGISTRY_EXTERNAL_PORT: ${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
+#            SCHEMA_REGISTRY_EXTERNAL_PORT: ${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
             SCHEMA_REGISTRY_HOST_NAME: schema-registry-auth
-            SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+            SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
+#            SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
             SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
+            SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: kafka_zookeeper:2181
             SCHEMA_REGISTRY_AUTHENTICATION_METHOD: BASIC
-            SCHEMA_REGISTRY_AUTHENTICATION_ROLES: schemaadmin, schemauser
+            SCHEMA_REGISTRY_AUTHENTICATION_ROLES: user
             SCHEMA_REGISTRY_AUTHENTICATION_REALM: RealmFooBar
             SCHEMA_REGISTRY_OPTS: "-Djava.security.auth.login.config=/etc/schema-registry/secrets/schema_registry_jaas.conf"
-            SCHEMA_REGISTRY_GROUP_ID: auth
-            SCHEMA_REGISTRY_ZK_NAMESPACE: auth
-            SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemaauth
+#            SCHEMA_REGISTRY_GROUP_ID: auth
+            SCHEMA_REGISTRY_ZK_NAMESPACE: schema_registry_auth
+            SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas2
         volumes:
             - ${SCHEMA_REGISTRY_DIR:-}/secrets:/etc/schema-registry/secrets
         depends_on:
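
The compose hunks above give schema-registry-auth HTTP Basic authentication: SCHEMA_REGISTRY_AUTHENTICATION_METHOD: BASIC with realm RealmFooBar and role user, while SCHEMA_REGISTRY_OPTS points at a JAAS config in the mounted secrets directory that defines the actual users (the secret files themselves are not part of this diff). A quick way to confirm the authenticated registry answers is a Basic-auth request against its REST API; the sketch below assumes the schemauser/letmein credentials used by the commented-out test and a registry published on localhost:12313, both placeholders rather than values fixed by this commit. The remaining hunks are the integration test itself.

    # Sketch: check that the basic-auth schema registry responds; URL and
    # credentials are assumptions (the test derives them from started_cluster).
    import base64
    import urllib.request

    url = "http://localhost:12313/subjects"                  # Schema Registry REST endpoint
    credentials = base64.b64encode(b"schemauser:letmein").decode()

    req = urllib.request.Request(url)
    req.add_header("Authorization", "Basic " + credentials)
    with urllib.request.urlopen(req) as resp:                # raises HTTPError 401 on bad credentials
        print(resp.status, resp.read())                      # e.g. 200 b'[]' before any schema is registered
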
@@ -1,5 +1,6 @@
 import io
 import logging
+import time
 
 import avro.schema
 import pytest
@@ -46,14 +47,21 @@ def run_query(instance, query, data=None, settings=None):
 def test_select(started_cluster):
     # type: (ClickHouseCluster) -> None
 
-    schema_registry_client = CachedSchemaRegistryClient(
-        "http://localhost:{}".format(started_cluster.schema_registry_port)
-    )
+    time.sleep(3)
+
+    # schema_registry_client = CachedSchemaRegistryClient(
+    #    "http://localhost:{}".format(started_cluster.schema_registry_port)
+    # )
+    reg_url="http://localhost:{}".format(
+        started_cluster.schema_registry_port)
+    arg={'url':reg_url}
+
+    schema_registry_client = CachedSchemaRegistryClient(arg)
     serializer = MessageSerializer(schema_registry_client)
 
     schema = avro.schema.make_avsc_object(
         {
-            "name": "test_record",
+            "name": "test_record1",
             "type": "record",
             "fields": [{"name": "value", "type": "long"}],
         }
@@ -62,14 +70,14 @@ def test_select(started_cluster):
     buf = io.BytesIO()
     for x in range(0, 3):
         message = serializer.encode_record_with_schema(
-            "test_subject", schema, {"value": x}
+            "test_subject1", schema, {"value": x}
         )
         buf.write(message)
     data = buf.getvalue()
 
     instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
     schema_registry_url = "http://{}:{}".format(
-        started_cluster.schema_registry_host, 8081
+        started_cluster.schema_registry_host, started_cluster.schema_registry_port
     )
 
     run_query(instance, "create table avro_data(value Int64) engine = Memory()")
@@ -85,6 +93,7 @@ def test_select(started_cluster):
 
 # def test_select_auth(started_cluster):
 #     # type: (ClickHouseCluster) -> None
+#     time.sleep(5)
 
 #     reg_url="http://localhost:{}".format(
 #         started_cluster.schema_registry_auth_port)
@@ -95,7 +104,7 @@ def test_select(started_cluster):
 
 #     schema = avro.schema.make_avsc_object(
 #         {
-#             "name": "test_record",
+#             "name": "test_record_auth",
 #             "type": "record",
 #             "fields": [{"name": "value", "type": "long"}],
 #         }
@@ -104,7 +113,7 @@ def test_select(started_cluster):
 #     buf = io.BytesIO()
 #     for x in range(0, 3):
 #         message = serializer.encode_record_with_schema(
-#             "test_subject", schema, {"value": x}
+#             "test_subject_auth", schema, {"value": x}
 #         )
 #         buf.write(message)
 #     data = buf.getvalue()
@@ -112,7 +121,7 @@ def test_select(started_cluster):
 #     instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
 #     schema_registry_url = "http://{}:{}@{}:{}".format(
 #         'schemauser', 'letmein',
-#         started_cluster.schema_registry_auth_host, 8081
+#         started_cluster.schema_registry_auth_host, started_cluster.schema_registry_auth_port
 #     )
 
 #     run_query(instance, "create table avro_data_auth(value Int64) engine = Memory()")