Mirror of https://github.com/ClickHouse/ClickHouse.git
Merge pull request #63527 from ClickHouse/everything-should-work-on-aarch64-part-2
Almost everything should work on AArch64 (Part of #58061)
Commit 536398a916
@@ -28,7 +28,7 @@ namespace ErrorCodes
 
 template <typename Distance>
 AnnoyIndexWithSerialization<Distance>::AnnoyIndexWithSerialization(size_t dimensions)
-    : Base::AnnoyIndex(dimensions)
+    : Base::AnnoyIndex(static_cast<int>(dimensions))
 {
 }
 
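Note on the hunk above: the constructor change only makes the size_t-to-int narrowing explicit. A minimal standalone sketch of that pattern, assuming (as the diff suggests) that the underlying Annoy constructor takes the dimension count as an int; the type names here are hypothetical simplifications, not ClickHouse code:

#include <cstddef>

// Stand-in for the third-party base class whose constructor takes an int.
struct AnnoyIndexBase
{
    explicit AnnoyIndexBase(int dimensions_) : dimensions(dimensions_) {}
    int dimensions;
};

// Stand-in for the wrapper: it receives a size_t, so the 64-bit-to-int
// narrowing is written out explicitly instead of happening implicitly.
struct AnnoyIndexSketch : AnnoyIndexBase
{
    explicit AnnoyIndexSketch(size_t dimensions)
        : AnnoyIndexBase(static_cast<int>(dimensions))
    {
    }
};

int main()
{
    AnnoyIndexSketch index(128);
    return index.dimensions == 128 ? 0 : 1;
}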
@@ -181,11 +181,8 @@ elif [[ "$USE_AZURE_STORAGE_FOR_MERGE_TREE" == "1" ]]; then
     ln -sf $SRC_PATH/config.d/azure_storage_policy_by_default.xml $DEST_SERVER_PATH/config.d/
 fi
 
-ARM="aarch64"
-OS="$(uname -m)"
 if [[ -n "$EXPORT_S3_STORAGE_POLICIES" ]]; then
-    echo "$OS"
-    if [[ "$USE_DATABASE_REPLICATED" -eq 1 ]] || [[ "$OS" == "$ARM" ]]; then
+    if [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
         echo "Azure configuration will not be added"
     else
         echo "Adding azure configuration"
@@ -1,4 +1,4 @@
--- Tags: no-tsan, no-asan, no-ubsan, no-msan, no-debug, no-cpu-aarch64
+-- Tags: no-tsan, no-asan, no-ubsan, no-msan, no-debug
 
 -- This file contains tests for the event_time_microseconds field for various tables.
 -- Note: Only event_time_microseconds for asynchronous_metric_log table is tested via
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# Tags: no-fasttest, no-parallel, no-s3-storage, no-random-settings, no-cpu-aarch64, no-replicated-database
+# Tags: no-fasttest, no-parallel, no-s3-storage, no-random-settings, no-replicated-database
 
 # set -x
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# Tags: long, no-fasttest, no-parallel, no-s3-storage, no-random-settings, no-cpu-aarch64
+# Tags: long, no-fasttest, no-parallel, no-s3-storage, no-random-settings
 
 # set -x
@@ -1,4 +1,4 @@
--- Tags: no-fasttest, no-parallel, no-cpu-aarch64
+-- Tags: no-fasttest, no-parallel
 -- Tag no-fasttest: Depends on Java
 
 insert into table function hdfs('hdfs://localhost:12222/test_1.tsv', 'TSV', 'column1 UInt32, column2 UInt32, column3 UInt32') select 1, 2, 3 settings hdfs_truncate_on_insert=1;
@@ -1,5 +1,5 @@
--- Tags: no-fasttest, no-cpu-aarch64
+-- Tags: no-fasttest
 SELECT * FROM hdfsCluster('test_shard_localhost', '', 'TSV'); -- { serverError BAD_ARGUMENTS }
 SELECT * FROM hdfsCluster('test_shard_localhost', ' ', 'TSV'); -- { serverError BAD_ARGUMENTS }
 SELECT * FROM hdfsCluster('test_shard_localhost', '/', 'TSV'); -- { serverError BAD_ARGUMENTS }
 SELECT * FROM hdfsCluster('test_shard_localhost', 'http/', 'TSV'); -- { serverError BAD_ARGUMENTS }
@@ -1,4 +1,4 @@
--- Tags: no-fasttest, no-parallel, no-cpu-aarch64
+-- Tags: no-fasttest, no-parallel
 -- Tag no-fasttest: Depends on Java
 
 insert into table function hdfs('hdfs://localhost:12222/test_02458_1.tsv', 'TSV', 'column1 UInt32, column2 UInt32, column3 UInt32') select 1, 2, 3 settings hdfs_truncate_on_insert=1;
@@ -9,4 +9,3 @@ desc hdfsCluster('test_cluster_one_shard_three_replicas_localhost', 'hdfs://loca
 
 select * from hdfsCluster('test_cluster_one_shard_three_replicas_localhost', 'hdfs://localhost:12222/test_02458_{1,2}.tsv') order by c1, c2, c3;
 select * from hdfsCluster('test_cluster_one_shard_three_replicas_localhost', 'hdfs://localhost:12222/test_02458_{1,2}.tsv', 'TSV') order by c1, c2, c3;
-
@@ -1,4 +1,4 @@
--- Tags: no-fasttest, no-parallel, no-cpu-aarch64
+-- Tags: no-fasttest, no-parallel
 -- Tag no-fasttest: Depends on Java
 
 insert into table function hdfs('hdfs://localhost:12222/test_02536.jsonl', 'TSV') select '{"x" : {"a" : 1, "b" : 2}}' settings hdfs_truncate_on_insert=1;
@@ -9,4 +9,3 @@ insert into test select * from hdfsCluster('test_cluster_two_shards_localhost',
 insert into test select * from hdfsCluster('test_cluster_two_shards_localhost', 'hdfs://localhost:12222/test_02536.jsonl') settings use_structure_from_insertion_table_in_table_functions=1;
 select * from test;
 drop table test;
-
@@ -1,4 +1,4 @@
--- Tags: no-fasttest, no-cpu-aarch64, no-msan
+-- Tags: no-fasttest, no-msan
 
 drop table if exists dummy;
 CREATE TABLE dummy ( num1 Int32, num2 Enum8('foo' = 0, 'bar' = 1, 'tar' = 2) )
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-# Tags: no-fasttest, no-cpu-aarch64
+# Tags: no-fasttest
 
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck source=../shell_config.sh
@@ -20,4 +20,3 @@ $CLICKHOUSE_LOCAL -q "select count() from file('$ERRORS_FILE', CSV)"
 rm $ERRORS_FILE
 
 rm $FILE
-