#!/usr/bin/env bash

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh

WORKING_FOLDER_01528="${CLICKHOUSE_TMP}/01528_clickhouse_local_prepare_parts"

rm -rf "${WORKING_FOLDER_01528}"
mkdir -p "${WORKING_FOLDER_01528}/metadata/local"

## Checks the scenario of preparing parts offline with clickhouse-local.

## This is the metadata for the table we want to fill;
## the schema should match the schema of the table on the server
## (the easiest way is just to copy it from the server).
cat <<EOF > "${WORKING_FOLDER_01528}/metadata/local/test.sql"
ATTACH TABLE local.test (id UInt64, d Date, s String) Engine=MergeTree ORDER BY id PARTITION BY toYYYYMM(d);
EOF

#################

echo "Option 1. Prepare parts from table with Engine=File defined in metadata, read from an arbitrary path"

## Source file:
cat <<EOF > "${WORKING_FOLDER_01528}/data.csv"
1,2020-01-01,"String"
2,2020-02-02,"Another string"
3,2020-03-03,"One more string"
4,2020-01-02,"String for first partition"
EOF

## metadata written into file
cat <<EOF > "${WORKING_FOLDER_01528}/metadata/local/data_csv.sql"
ATTACH TABLE local.data_csv (id UInt64, d Date, s String) Engine=File(CSV, '${WORKING_FOLDER_01528}/data.csv');
EOF

## feed the table
${CLICKHOUSE_LOCAL} --query "INSERT INTO local.test SELECT * FROM local.data_csv;" -- --path="${WORKING_FOLDER_01528}"

## check the parts were created
${CLICKHOUSE_LOCAL} --query "SELECT * FROM local.test WHERE id < 10 ORDER BY id;" -- --path="${WORKING_FOLDER_01528}"

#################

echo "Option 2. Prepare parts from table with Engine=File defined in metadata, read from stdin (pipe)"

cat <<EOF > "${WORKING_FOLDER_01528}/metadata/local/stdin.sql"
ATTACH TABLE local.stdin (id UInt64, d Date, s String) Engine=File(CSV, stdin);
EOF

cat <