#!/usr/bin/env bash
# Tags: long, no-fasttest, no-debug
#
# Load all possible .parquet files found in submodules.
# TODO: Add more files.
#
# Also 5 sample files from
# wget https://github.com/Teradata/kylo/raw/master/samples/sample-data/parquet/userdata1.parquet
# ...
# wget https://github.com/Teradata/kylo/raw/master/samples/sample-data/parquet/userdata5.parquet
# set -x

CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CUR_DIR"/../shell_config.sh

# Locate the repository root so we can find the optional parquet-reader tool.
# When the client binary lives in the current dir we are running from a source
# checkout; otherwise derive the root from the binary's location.
CB_DIR=$(dirname "$CLICKHOUSE_CLIENT_BINARY")
[ "$CB_DIR" == "." ] && ROOT_DIR=$CUR_DIR/../../..
[ -z "$ROOT_DIR" ] && ROOT_DIR=$CB_DIR/../..

DATA_DIR=$CUR_DIR/data_parquet

# parquet-reader (from the Arrow submodule build) is used only to regenerate
# the cached .json/.columns metadata files; the test still runs without it.
[ -n "$ROOT_DIR" ] && [ -z "$PARQUET_READER" ] && PARQUET_READER="$ROOT_DIR"/contrib/arrow/cpp/build/release/parquet-reader

# To update:
# cp $ROOT_DIR/contrib/arrow/cpp/submodules/parquet-testing/data/*.parquet $ROOT_DIR/contrib/arrow/python/pyarrow/tests/data/parquet/*.parquet $CUR_DIR/data_parquet/

# ClickHouse Parquet reader doesn't support such complex types, so I didn't burrow into the issue.
# There is failure due parsing nested arrays or nested maps with NULLs:
# ../contrib/arrow/cpp/src/arrow/array/array_nested.cc:192: Check failed: (self->list_type_->value_type()->id()) == (data->child_data[0]->type->id())
# Strange behaviour for repeated_no_annotation.parquet around __buitin_expect, so this file was disabled:
# debug:
# ../contrib/arrow/cpp/src/arrow/array/array_nested.cc:193: Check failed: self->list_type_->value_type()->Equals(data->child_data[0]->type)
# release:
# Code: 349. DB::Ex---tion: Can not insert NULL data into non-nullable column "phoneNumbers": data for INSERT was parsed from stdin

# Iterate over the basenames of all .parquet fixtures in deterministic
# (byte-wise) order. NUL-delimited find/xargs plus a read loop avoids
# word-splitting pitfalls of `for NAME in $(...)`.
while IFS= read -r NAME; do
    JSON=$DATA_DIR/$NAME.json
    COLUMNS_FILE=$DATA_DIR/$NAME.columns

    # Skip files for which we have neither a cached schema nor a usable
    # parquet-reader binary to generate one.
    { [ -z "$PARQUET_READER" ] || [ ! -s "$PARQUET_READER" ]; } && [ ! -s "$COLUMNS_FILE" ] && continue

    echo "=== Try load data from $NAME"

    # If you want change or add .parquet file - rm data_parquet/*.json data_parquet/*.columns
    [ -n "$PARQUET_READER" ] && [ ! -s "$COLUMNS_FILE" ] && [ ! -s "$JSON" ] && "$PARQUET_READER" --json "$DATA_DIR"/"$NAME" > "$JSON"
    [ ! -s "$COLUMNS_FILE" ] && "$CUR_DIR"/helpers/00900_parquet_create_table_columns.py "$JSON" > "$COLUMNS_FILE"

    # Debug only:
    # [ -n "$PARQUET_READER" ] && $PARQUET_READER $DATA_DIR/$NAME > $DATA_DIR/$NAME.dump

    # COLUMNS=`$CUR_DIR/00900_parquet_create_table_columns.py $JSON` 2>&1 || continue
    COLUMNS=$(cat "$COLUMNS_FILE") || continue

    ${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS parquet_load"
    $CLICKHOUSE_CLIENT --multiquery <<EOF
SET allow_experimental_map_type = 1;
CREATE TABLE parquet_load ($COLUMNS) ENGINE = Memory;
EOF

    # Some files contain unsupported data structures, exception is ok.
    # "Exception" is mangled so the test reference file stays stable across
    # exception-message formatting changes.
    ${CLICKHOUSE_CLIENT} --query="INSERT INTO parquet_load FORMAT Parquet" < "$DATA_DIR"/"$NAME" 2>&1 | sed 's/Exception/Ex---tion/'

    ${CLICKHOUSE_CLIENT} --query="SELECT * FROM parquet_load ORDER BY tuple(*) LIMIT 100"
    ${CLICKHOUSE_CLIENT} --query="DROP TABLE parquet_load"
done < <(find "$DATA_DIR"/*.parquet -print0 | xargs -0 -n 1 basename | LC_ALL=C sort)