ClickHouse/tests/integration/test_format_avro_confluent/test.py
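"""Integration tests for ClickHouse's AvroConfluent input format.

Messages are encoded with the Confluent Python client's MessageSerializer and
inserted over HTTP; ClickHouse resolves each message's schema ID against a
schema registry, both without auth and behind basic auth (including
credentials that must be URL-encoded).
"""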

import io
import logging
import time
from urllib import parse
import avro.schema
import pytest
from confluent_kafka.avro.cached_schema_registry_client import (
    CachedSchemaRegistryClient,
)
from confluent_kafka.avro.serializer.message_serializer import MessageSerializer

from helpers.cluster import ClickHouseCluster, ClickHouseInstance, is_arm

# Skip on ARM due to Confluent/Kafka
if is_arm():
    pytestmark = pytest.mark.skip
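

# with_kafka=True also brings up the schema registry containers used below;
# with_secrets=True presumably supplies the credentials for the auth-protected
# registry (inferred from the helper names, not verified here).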
@pytest.fixture(scope="module")
def started_cluster():
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance("dummy", with_kafka=True, with_secrets=True)
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")
        yield cluster
    finally:
        cluster.shutdown()
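

# Queries go through the HTTP interface so that format parsing happens on the
# server; per-query settings are passed as URL parameters.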
def run_query(instance, query, data=None, settings=None):
    # type: (ClickHouseInstance, str, object, dict) -> str
    logging.info("Running query '{}'...".format(query))
    # use http to force parsing on server
    if not data:
        data = " "  # make POST request
    result = instance.http_query(query, data=data, params=settings)
    logging.info("Query finished")
    return result
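

# Each test encodes a few records with MessageSerializer, which wraps every
# message in the Confluent wire format: a zero magic byte, a 4-byte big-endian
# schema ID obtained from the registry, then the Avro-encoded payload.
# ClickHouse's AvroConfluent format must decode them back by fetching the
# schema for each ID from format_avro_schema_registry_url.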
def test_select(started_cluster):
    # type: (ClickHouseCluster) -> None
    reg_url = "http://localhost:{}".format(started_cluster.schema_registry_port)
    arg = {"url": reg_url}
    schema_registry_client = CachedSchemaRegistryClient(arg)
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object(
        {
            "name": "test_record1",
            "type": "record",
            "fields": [{"name": "value", "type": "long"}],
        }
    )
    buf = io.BytesIO()
    for x in range(0, 3):
        message = serializer.encode_record_with_schema(
            "test_subject1", schema, {"value": x}
        )
        buf.write(message)
    data = buf.getvalue()

    instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
    schema_registry_url = "http://{}:{}".format(
        started_cluster.schema_registry_host, started_cluster.schema_registry_port
    )

    run_query(instance, "create table avro_data(value Int64) engine = Memory()")
    settings = {"format_avro_schema_registry_url": schema_registry_url}
    run_query(instance, "insert into avro_data format AvroConfluent", data, settings)
    stdout = run_query(instance, "select * from avro_data")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]
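

# Same flow, but against the auth-protected registry: ClickHouse receives the
# credentials inline in the URL as user:password@host:port.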
def test_select_auth(started_cluster):
    # type: (ClickHouseCluster) -> None
    reg_url = "http://localhost:{}".format(started_cluster.schema_registry_auth_port)
    arg = {
        "url": reg_url,
        "basic.auth.credentials.source": "USER_INFO",
        "basic.auth.user.info": "schemauser:letmein",
    }
    schema_registry_client = CachedSchemaRegistryClient(arg)
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object(
        {
            "name": "test_record_auth",
            "type": "record",
            "fields": [{"name": "value", "type": "long"}],
        }
    )
    buf = io.BytesIO()
    for x in range(0, 3):
        message = serializer.encode_record_with_schema(
            "test_subject_auth", schema, {"value": x}
        )
        buf.write(message)
    data = buf.getvalue()

    instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
    schema_registry_url = "http://{}:{}@{}:{}".format(
        "schemauser",
        "letmein",
        started_cluster.schema_registry_auth_host,
        started_cluster.schema_registry_auth_port,
    )

    run_query(instance, "create table avro_data_auth(value Int64) engine = Memory()")
    settings = {"format_avro_schema_registry_url": schema_registry_url}
    run_query(
        instance, "insert into avro_data_auth format AvroConfluent", data, settings
    )
    stdout = run_query(instance, "select * from avro_data_auth")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]
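

# The username ClickHouse authenticates with ("schemauser/slash") contains a
# "/", so it must be percent-encoded to survive URL parsing. The Python client
# still registers the schema as schemauser:letmein; "schemauser/slash" is
# presumably a second account configured for the registry.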
def test_select_auth_encoded(started_cluster):
    # type: (ClickHouseCluster) -> None
    reg_url = "http://localhost:{}".format(started_cluster.schema_registry_auth_port)
    arg = {
        "url": reg_url,
        "basic.auth.credentials.source": "USER_INFO",
        "basic.auth.user.info": "schemauser:letmein",
    }
    schema_registry_client = CachedSchemaRegistryClient(arg)
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object(
        {
            "name": "test_record_auth_encoded",
            "type": "record",
            "fields": [{"name": "value", "type": "long"}],
        }
    )
    buf = io.BytesIO()
    for x in range(0, 3):
        message = serializer.encode_record_with_schema(
            "test_subject_auth_encoded", schema, {"value": x}
        )
        buf.write(message)
    data = buf.getvalue()

    instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
    schema_registry_url = "http://{}:{}@{}:{}".format(
        parse.quote_plus("schemauser/slash"),
        parse.quote_plus("letmein"),
        started_cluster.schema_registry_auth_host,
        started_cluster.schema_registry_auth_port,
    )

    run_query(
        instance, "create table avro_data_auth_encoded(value Int64) engine = Memory()"
    )
    settings = {"format_avro_schema_registry_url": schema_registry_url}
    run_query(
        instance,
        "insert into avro_data_auth_encoded format AvroConfluent",
        data,
        settings,
    )
    stdout = run_query(instance, "select * from avro_data_auth_encoded")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]
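

# Same idea for the password: "letmein%@:/" is made of URL-reserved
# characters, so quote_plus() is required for the URL to parse at all.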
def test_select_auth_encoded_complex(started_cluster):
    # type: (ClickHouseCluster) -> None
    reg_url = "http://localhost:{}".format(started_cluster.schema_registry_auth_port)
    arg = {
        "url": reg_url,
        "basic.auth.credentials.source": "USER_INFO",
        "basic.auth.user.info": "schemauser:letmein",
    }
    schema_registry_client = CachedSchemaRegistryClient(arg)
    serializer = MessageSerializer(schema_registry_client)

    schema = avro.schema.make_avsc_object(
        {
            "name": "test_record_auth_encoded_complex",
            "type": "record",
            "fields": [{"name": "value", "type": "long"}],
        }
    )
    buf = io.BytesIO()
    for x in range(0, 3):
        message = serializer.encode_record_with_schema(
            "test_subject_auth_encoded_complex", schema, {"value": x}
        )
        buf.write(message)
    data = buf.getvalue()

    instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
    schema_registry_url = "http://{}:{}@{}:{}".format(
        parse.quote_plus("complexschemauser"),
        parse.quote_plus("letmein%@:/"),
        started_cluster.schema_registry_auth_host,
        started_cluster.schema_registry_auth_port,
    )

    run_query(
        instance,
        "create table avro_data_auth_encoded_complex(value Int64) engine = Memory()",
    )
    settings = {"format_avro_schema_registry_url": schema_registry_url}
    run_query(
        instance,
        "insert into avro_data_auth_encoded_complex format AvroConfluent",
        data,
        settings,
    )
    stdout = run_query(instance, "select * from avro_data_auth_encoded_complex")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]