2020-08-28 17:40:45 +00:00
|
|
|
import random
|
2020-09-16 04:26:10 +00:00
|
|
|
import string
|
2020-08-28 17:40:45 +00:00
|
|
|
|
2020-09-16 04:26:10 +00:00
|
|
|
import pytest
|
2020-08-28 17:40:45 +00:00
|
|
|
from helpers.cluster import ClickHouseCluster
|
|
|
|
|
|
|
|
cluster = ClickHouseCluster(__file__)

# node1 and node2 form a replica pair with the custom default-compression
# settings.  configs/wide_parts_only.xml presumably forces the wide part
# format so a per-column data1.bin file exists on disk (the byte-level
# checks below read it directly) — confirm against the config.
node1 = cluster.add_instance('node1', main_configs=['configs/default_compression.xml', 'configs/wide_parts_only.xml'], with_zookeeper=True)
node2 = cluster.add_instance('node2', main_configs=['configs/default_compression.xml', 'configs/wide_parts_only.xml'], with_zookeeper=True)
# node3 starts from an old release (20.3.16) and is upgraded in place by
# restart_with_latest_version() in test_default_codec_version_update.
node3 = cluster.add_instance('node3', main_configs=['configs/default_compression.xml', 'configs/wide_parts_only.xml'], image='yandex/clickhouse-server', tag='20.3.16', stay_alive=True, with_installed_binary=True)
# node4 runs with default configs only; used by test_default_codec_for_compact_parts.
node4 = cluster.add_instance('node4')
|
2020-08-28 17:40:45 +00:00
|
|
|
|
|
|
|
@pytest.fixture(scope="module")
def start_cluster():
    """Module-scoped fixture: start the shared cluster once for all tests
    in this file and always shut it down, even if startup or a test fails."""
    try:
        cluster.start()

        yield cluster
    finally:
        cluster.shutdown()
|
|
|
|
|
|
|
|
|
|
|
|
def get_compression_codec_byte(node, table_name, part_name):
    """Return the codec method byte of the first compressed block of
    ``data1.bin`` in the given part, as a 4-hex-digit string from ``od -x``.

    ``tail -c +17`` skips what appears to be a 16-byte block checksum so the
    first byte read is the codec id (compare with CODECS_MAPPING).
    """
    shell = (
        "tail -c +17 /var/lib/clickhouse/data/default/{}/{}/data1.bin | od -x -N 1 | head -n 1 | awk '{{print $2}}'"
        .format(table_name, part_name)
    )
    raw = node.exec_in_container(["bash", "-c", shell])
    return raw.strip()
|
|
|
|
|
|
|
|
|
|
|
|
def get_second_multiple_codec_byte(node, table_name, part_name):
    """Return the method byte of the nested (second) codec inside a
    'Multiple'-codec compressed block of ``data1.bin``.

    Same layout as get_compression_codec_byte, except ``od -j 11`` skips a
    further 11 bytes — presumably the outer codec's header — to land on the
    inner codec id.
    """
    shell = (
        "tail -c +17 /var/lib/clickhouse/data/default/{}/{}/data1.bin | od -x -j 11 -N 1 | head -n 1 | awk '{{print $2}}'"
        .format(table_name, part_name)
    )
    raw = node.exec_in_container(["bash", "-c", shell])
    return raw.strip()
|
|
|
|
|
|
|
|
|
|
|
|
def get_random_string(length):
    """Return a random string of *length* characters drawn from uppercase
    ASCII letters and digits (used as compressible/incompressible payload)."""
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
|
|
|
|
|
|
|
|
|
|
|
|
# Codec name -> method byte of the compressed block header, formatted the way
# `od -x` prints it (see get_compression_codec_byte / get_second_multiple_codec_byte).
CODECS_MAPPING = {
    'LZ4': '0082',
    'LZ4HC': '0082',  # not an error, same byte
    'ZSTD': '0090',
    'Multiple': '0091',
}
|
|
|
|
|
|
|
|
|
|
|
|
def test_default_codec_single(start_cluster):
    """CODEC(Default) must resolve to the server-wide default codec, which is
    chosen per part by size (thresholds come from configs/default_compression.xml):
    the observed behaviour is ZSTD(10) for small parts, LZ4HC(5) for medium,
    plain LZ4 for large merged parts.

    Checks both the codec byte actually written on disk and the codec reported
    by system.parts, on both replicas, before and after a merge.
    """

    def check_codec(part_name, codec_name, codec_repr):
        # On-disk method byte of the first compressed block of data1.bin...
        assert get_compression_codec_byte(node1, "compression_table", part_name) == CODECS_MAPPING[codec_name]
        # ...and the codec system.parts reports, on both replicas.
        for node in (node1, node2):
            assert node.query(
                "SELECT default_compression_codec FROM system.parts WHERE table = 'compression_table' and name = '{}'".format(
                    part_name)) == codec_repr + "\n"

    def reload_parts():
        # DETACH + ATTACH forces both replicas to re-open the parts from disk;
        # SYSTEM FLUSH LOGS brings system tables up to date.
        node1.query("DETACH TABLE compression_table")
        node2.query("DETACH TABLE compression_table")
        node1.query("ATTACH TABLE compression_table")
        node2.query("ATTACH TABLE compression_table")
        node1.query("SYSTEM FLUSH LOGS")
        node2.query("SYSTEM FLUSH LOGS")

    for i, node in enumerate([node1, node2]):
        node.query("""
        CREATE TABLE compression_table (
            key UInt64,
            data1 String CODEC(Default)
        ) ENGINE = ReplicatedMergeTree('/t', '{}') ORDER BY tuple() PARTITION BY key;
        """.format(i))

    # One partition per size class:
    # ZSTD(10) and ZSTD(10) after merge
    node1.query("INSERT INTO compression_table VALUES (1, 'x')")
    # ZSTD(10) and LZ4HC(10) after merge
    node1.query("INSERT INTO compression_table VALUES (2, '{}')".format(get_random_string(2048)))
    # ZSTD(10) and LZ4 after merge
    node1.query("INSERT INTO compression_table VALUES (3, '{}')".format(get_random_string(22048)))

    node2.query("SYSTEM SYNC REPLICA compression_table", timeout=15)

    # to reload parts
    reload_parts()

    # All freshly inserted parts report the same codec.
    check_codec("1_0_0_0", 'ZSTD', "ZSTD(10)")
    check_codec("2_0_0_0", 'ZSTD', "ZSTD(10)")
    check_codec("3_0_0_0", 'ZSTD', "ZSTD(10)")

    # Merge every partition; syncing also makes sure replication works.
    node1.query("OPTIMIZE TABLE compression_table FINAL")
    node2.query("SYSTEM SYNC REPLICA compression_table", timeout=15)

    # to reload parts
    reload_parts()

    # After the merge the default codec is re-chosen per part.
    check_codec("1_0_0_1", 'ZSTD', "ZSTD(10)")
    check_codec("2_0_0_1", 'LZ4HC', "LZ4HC(5)")
    check_codec("3_0_0_1", 'LZ4', "LZA" if False else "LZ4")

    assert node1.query("SELECT COUNT() FROM compression_table") == "3\n"
    assert node2.query("SELECT COUNT() FROM compression_table") == "3\n"
|
|
|
|
|
|
|
|
|
|
|
|
def test_default_codec_multiple(start_cluster):
    """Same scenario as test_default_codec_single but with a codec chain
    CODEC(NONE, Default): the block header must carry the 'Multiple' marker,
    with the size-dependent default codec nested inside it.
    """

    def check_codec(part_name, inner_codec, codec_repr):
        # Chained codecs are written with the 'Multiple' method byte first...
        assert get_compression_codec_byte(node1, "compression_table_multiple", part_name) == CODECS_MAPPING['Multiple']
        # ...followed (11 bytes in) by the nested default codec's byte.
        assert get_second_multiple_codec_byte(node1, "compression_table_multiple", part_name) == CODECS_MAPPING[inner_codec]
        for node in (node1, node2):
            assert node.query(
                "SELECT default_compression_codec FROM system.parts WHERE table = 'compression_table_multiple' and name = '{}'".format(
                    part_name)) == codec_repr + "\n"

    # NONE in the chain is "suspicious", so it must be explicitly allowed.
    for i, node in enumerate([node1, node2]):
        node.query("""
        CREATE TABLE compression_table_multiple (
            key UInt64,
            data1 String CODEC(NONE, Default)
        ) ENGINE = ReplicatedMergeTree('/d', '{}') ORDER BY tuple() PARTITION BY key;
        """.format(i), settings={"allow_suspicious_codecs": 1})

    # One partition per size class:
    # ZSTD(10) and ZSTD(10) after merge
    node1.query("INSERT INTO compression_table_multiple VALUES (1, 'x')")
    # ZSTD(10) and LZ4HC(10) after merge
    node1.query("INSERT INTO compression_table_multiple VALUES (2, '{}')".format(get_random_string(2048)))
    # ZSTD(10) and LZ4 after merge
    node1.query("INSERT INTO compression_table_multiple VALUES (3, '{}')".format(get_random_string(22048)))

    # All freshly inserted parts nest the same codec.
    check_codec("1_0_0_0", 'ZSTD', "ZSTD(10)")
    check_codec("2_0_0_0", 'ZSTD', "ZSTD(10)")
    check_codec("3_0_0_0", 'ZSTD', "ZSTD(10)")

    node2.query("SYSTEM SYNC REPLICA compression_table_multiple", timeout=15)

    node1.query("OPTIMIZE TABLE compression_table_multiple FINAL")

    # NOTE(review): unlike test_default_codec_single there is no SYNC REPLICA
    # after the OPTIMIZE; the node2 checks below assume replication of the
    # merged parts is fast enough — confirm this is intentional.
    check_codec("1_0_0_1", 'ZSTD', "ZSTD(10)")
    check_codec("2_0_0_1", 'LZ4HC', "LZ4HC(5)")
    check_codec("3_0_0_1", 'LZ4', "LZ4")

    assert node1.query("SELECT COUNT() FROM compression_table_multiple") == "3\n"
    assert node2.query("SELECT COUNT() FROM compression_table_multiple") == "3\n"
|
2020-08-28 09:07:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_default_codec_version_update(start_cluster):
    """Upgrade node3 from 20.3.16 to the latest version and check how the
    default codec is reported: parts written by the old server show up as
    ZSTD(1), while parts merged after the upgrade get the size-dependent
    default codec."""
    node3.query("""
    CREATE TABLE compression_table (
        key UInt64 CODEC(LZ4HC(7)),
        data1 String
    ) ENGINE = MergeTree ORDER BY tuple() PARTITION BY key;
    """)

    # One partition per size class (same sizes as the other tests).
    node3.query("INSERT INTO compression_table VALUES (1, 'x')")
    node3.query("INSERT INTO compression_table VALUES (2, '{}')".format(get_random_string(2048)))
    node3.query("INSERT INTO compression_table VALUES (3, '{}')".format(get_random_string(22048)))

    node3.restart_with_latest_version()

    # Every part created before the upgrade is reported as ZSTD(1).
    for part in ("1_1_1_0", "2_2_2_0", "3_3_3_0"):
        assert node3.query(
            "SELECT default_compression_codec FROM system.parts WHERE table = 'compression_table' and name = '{}'".format(
                part)) == "ZSTD(1)\n"

    node3.query("OPTIMIZE TABLE compression_table FINAL")

    # Parts merged by the new server pick the size-dependent default codec.
    for part, codec in (("1_1_1_1", "ZSTD(10)"), ("2_2_2_1", "LZ4HC(5)"), ("3_3_3_1", "LZ4")):
        assert node3.query(
            "SELECT default_compression_codec FROM system.parts WHERE table = 'compression_table' and name = '{}'".format(
                part)) == codec + "\n"
|
2020-12-23 11:53:49 +00:00
|
|
|
|
|
|
|
def test_default_codec_for_compact_parts(start_cluster):
    """Deleting default_compression_codec.txt from a detached part must not
    break ATTACH: the data stays readable both after re-attaching the part
    and after a full table detach/attach cycle."""

    def count():
        return node4.query("SELECT COUNT() FROM compact_parts_table")

    node4.query("""
    CREATE TABLE compact_parts_table (
        key UInt64,
        data String
    )
    ENGINE MergeTree ORDER BY tuple()
    """)

    node4.query("INSERT INTO compact_parts_table VALUES (1, 'Hello world')")
    assert count() == "1\n"

    # Detach the part and remove its codec marker file from disk.
    node4.query("ALTER TABLE compact_parts_table DETACH PART 'all_1_1_0'")
    node4.exec_in_container(["bash", "-c", "rm /var/lib/clickhouse/data/default/compact_parts_table/detached/all_1_1_0/default_compression_codec.txt"])

    # Attaching the marker-less part must still succeed and keep data readable.
    node4.query("ALTER TABLE compact_parts_table ATTACH PART 'all_1_1_0'")
    assert count() == "1\n"

    # A full table reload must also survive the missing marker file.
    node4.query("DETACH TABLE compact_parts_table")
    node4.query("ATTACH TABLE compact_parts_table")
    assert count() == "1\n"
|