Fix possible heap-buffer-overflow in Avro

Author: avogar
Date: 2022-07-22 17:59:43 +00:00
Parent: d23da91fe7
Commit: 8283cc9e2c

5 changed files with 16 additions and 2 deletions

contrib/avro (vendored submodule, 2 changed lines)

@@ -1 +1 @@
-Subproject commit aac4e605f070f2abd9a5f02ae70c17e4011588e8
+Subproject commit 7832659ec986075d560f930c288e973c64679552
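
The overflow itself is fixed inside the vendored Avro library; this commit only advances the contrib/avro submodule pointer to the patched revision. A minimal sketch of how one might verify the bump in a local checkout (the commands are illustrative, not part of the commit):

# Hedged sketch: fetch the vendored Avro submodule and confirm it points
# at the patched revision listed in the diff above.
git submodule update --init contrib/avro
git -C contrib/avro rev-parse HEAD
# expected output: 7832659ec986075d560f930c288e973c64679552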

Changed file: an existing .sh test (file name not shown)

@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# Tags: no-fasttest
+# Tags: no-fasttest, no-parallel

 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck source=../shell_config.sh

New file: a .sh test (file name not shown)

@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+# Tags: no-fasttest, no-parallel
+
+CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+# shellcheck source=../shell_config.sh
+. "$CURDIR"/../shell_config.sh
+
+USER_FILES_PATH=$(clickhouse-client --query "select _path,_file from file('nonexist.txt', 'CSV', 'val1 char')" 2>&1 | grep Exception | awk '{gsub("/nonexist.txt","",$9); print $9}')
+
+cp $CURDIR/data_avro/corrupted.avro $USER_FILES_PATH/
+
+$CLICKHOUSE_CLIENT -q "select * from file(corrupted.avro)" 2>&1 | grep -F -q "Cannot read compressed data" && echo "OK" || echo "FAIL"

Binary file (the corrupted.avro fixture referenced by the new test) not shown.
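
As a rough manual check (not part of the commit; the clickhouse-local invocation, file path, and format name are assumptions), reading the corrupted fixture with the patched library should fail with a clean "Cannot read compressed data" exception instead of a heap-buffer-overflow:

# Hedged reproduction sketch: query the corrupted Avro fixture directly.
clickhouse-local -q "SELECT * FROM file('data_avro/corrupted.avro', 'Avro')" 2>&1 \
    | grep -F -q "Cannot read compressed data" && echo "OK" || echo "FAIL"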