--- See also tests/queries/0_stateless/01581_deduplicate_by_columns_local.sql
--- replicated case

-- Just in case previous test runs left some stuff behind.
DROP TABLE IF EXISTS replicated_deduplicate_by_columns_r1 SYNC;
DROP TABLE IF EXISTS replicated_deduplicate_by_columns_r2 SYNC;
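-- (SYNC makes the DROP synchronous: it waits until the table data is actually removed, so that
--  re-creating replicated tables with the same ZooKeeper path does not hit leftovers of a previous run.)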

SET replication_alter_partitions_sync = 2;
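-- (replication_alter_partitions_sync = 2 makes ALTER / OPTIMIZE / TRUNCATE on replicated tables
--  wait for all replicas to apply the change; 1 would wait only for the local replica, 0 would not wait.)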

-- IRL insert_replica_id would be filled in from the hostname.
CREATE TABLE IF NOT EXISTS replicated_deduplicate_by_columns_r1 (
    id Int32, val UInt32, unique_value UInt64 MATERIALIZED rowNumberInBlock()
) ENGINE=ReplicatedMergeTree('/clickhouse/tables/test_01581/replicated_deduplicate', 'r1') ORDER BY id;
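-- unique_value is MATERIALIZED, so it is filled at insert time: rowNumberInBlock() is the row's
-- index within its insert block. The checks below report uniqExact(unique_value) next to count()
-- to show how the physical rows behind each (id, val) pair change as duplicates are removed.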

CREATE TABLE IF NOT EXISTS replicated_deduplicate_by_columns_r2 (
    id Int32, val UInt32, unique_value UInt64 MATERIALIZED rowNumberInBlock()
) ENGINE=ReplicatedMergeTree('/clickhouse/tables/test_01581/replicated_deduplicate', 'r2') ORDER BY id;
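-- The two replicas above share a single ZooKeeper path and differ only in the replica name ('r1' / 'r2').
-- For reference, a commented-out sketch of the macro-based form of the same engine declaration
-- (illustrative only: it assumes the server has the standard {database}/{table}/{replica} macros
-- configured, and the table name is hypothetical):
-- CREATE TABLE IF NOT EXISTS replicated_deduplicate_by_columns_r3 (
--     id Int32, val UInt32, unique_value UInt64 MATERIALIZED rowNumberInBlock()
-- ) ENGINE = ReplicatedMergeTree('/clickhouse/tables/{database}/{table}', '{replica}') ORDER BY id;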

-- insert some data: two records, (3, 1003) and (4, 1004), are duplicated between the two inserts and differ only in unique_value / insert_replica_id;
-- (1, 1001) and (5, 2005) have full duplicates.
INSERT INTO replicated_deduplicate_by_columns_r1 VALUES (1, 1001), (1, 1001), (2, 1002), (3, 1003), (4, 1004), (1, 2001), (9, 1002);
INSERT INTO replicated_deduplicate_by_columns_r2 VALUES (1, 1001), (2, 2002), (3, 1003), (4, 1004), (5, 2005), (5, 2005);

-- make sure that all data is present on all replicas
SYSTEM SYNC REPLICA replicated_deduplicate_by_columns_r2;
SYSTEM SYNC REPLICA replicated_deduplicate_by_columns_r1;
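-- (SYSTEM SYNC REPLICA waits until the given replica has fetched and applied everything currently
--  in its replication queue, so at this point both replicas contain both inserts.)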

SELECT 'check that we have a data';
SELECT 'r1', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r1 GROUP BY id, val ORDER BY id, val;
SELECT 'r2', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r2 GROUP BY id, val ORDER BY id, val;
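-- In these checks count() is the number of rows per (id, val) and uniqExact(unique_value) is the
-- number of distinct unique_value values behind them; groups with count() > 1 are the duplicates
-- that the OPTIMIZE ... DEDUPLICATE statements below are expected to collapse.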

-- NOTE: here and below we need FINAL to force deduplication, because such a small data set ends up in only 1 part.
-- This should remove the full duplicates.
OPTIMIZE TABLE replicated_deduplicate_by_columns_r1 FINAL DEDUPLICATE;
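-- The OPTIMIZE is issued on r1 only, but the merge is replicated, and with
-- replication_alter_partitions_sync = 2 the statement waits until all replicas have the
-- deduplicated part; that is why both replicas are checked below.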

SELECT 'after old OPTIMIZE DEDUPLICATE';
SELECT 'r1', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r1 GROUP BY id, val ORDER BY id, val;
SELECT 'r2', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r2 GROUP BY id, val ORDER BY id, val;

OPTIMIZE TABLE replicated_deduplicate_by_columns_r1 FINAL DEDUPLICATE BY id, val;
OPTIMIZE TABLE replicated_deduplicate_by_columns_r1 FINAL DEDUPLICATE BY COLUMNS('[id, val]');
OPTIMIZE TABLE replicated_deduplicate_by_columns_r1 FINAL DEDUPLICATE BY COLUMNS('[i]') EXCEPT(unique_value);
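-- Three forms of the new syntax are exercised above:
--   an explicit column list (DEDUPLICATE BY id, val);
--   DEDUPLICATE BY COLUMNS('regexp'), which selects the columns whose names match the regular
--     expression (note that '[id, val]' is a character class, so it matches every column whose
--     name contains any of those characters);
--   and COLUMNS('[i]') EXCEPT(unique_value), where EXCEPT removes columns from the matched set.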

SELECT 'check data again after multiple deduplications with new syntax';
SELECT 'r1', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r1 GROUP BY id, val ORDER BY id, val;
SELECT 'r2', id, val, count(), uniqExact(unique_value) FROM replicated_deduplicate_by_columns_r2 GROUP BY id, val ORDER BY id, val;

-- clean up the mess
DROP TABLE replicated_deduplicate_by_columns_r1;
DROP TABLE replicated_deduplicate_by_columns_r2;