Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-22 15:42:02 +00:00

fix race on loading dictionaries

parent ad69fbb972
commit 6cf3e5b77a

@@ -1238,8 +1238,13 @@ try
     global_context->setMacros(std::make_unique<Macros>(*config, "macros", log));
     global_context->setExternalAuthenticatorsConfig(*config);

-    global_context->loadOrReloadDictionaries(*config);
-    global_context->loadOrReloadUserDefinedExecutableFunctions(*config);
+    if (global_context->isServerCompletelyStarted())
+    {
+        /// It does not make sense to reload anything before server has started.
+        /// Moreover, it may break initialization order.
+        global_context->loadOrReloadDictionaries(*config);
+        global_context->loadOrReloadUserDefinedExecutableFunctions(*config);
+    }

     global_context->setRemoteHostFilter(*config);
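
This hunk is the fix itself: the config reloader used to call loadOrReloadDictionaries() and loadOrReloadUserDefinedExecutableFunctions() unconditionally, so a config change processed during startup could race with the initial dictionary load. With the change, those reloads are skipped until isServerCompletelyStarted() is true. A minimal illustrative analog of that guard in Python (hypothetical names, not ClickHouse code):

import threading

class ConfigReloader:
    def __init__(self):
        # Set once startup has finished loading everything.
        self.server_completely_started = threading.Event()

    def on_config_change(self, config):
        # Cheap settings can always be re-applied.
        print("applying macros:", config["macros"])
        # Dictionary (re)loading is deferred until the server has completely
        # started; doing it earlier races with the initial load and can break
        # initialization order.
        if self.server_completely_started.is_set():
            print("reloading dictionaries from:", config["dictionaries_config"])

reloader = ConfigReloader()
cfg = {"macros": {"shard": "01"}, "dictionaries_config": "*_dictionary.xml"}
reloader.on_config_change(cfg)            # during startup: reload skipped
reloader.server_completely_started.set()  # startup finished
reloader.on_config_change(cfg)            # now dictionaries are reloaded
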
@@ -17,7 +17,6 @@ import urllib.parse
 import shlex
 import urllib3
 import requests
-import pyspark

 try:
     # Please, add modules that required for specific tests only here.
@@ -33,6 +32,7 @@ try:
     import nats
     import ssl
     import meilisearch
+    import pyspark
     from confluent_kafka.avro.cached_schema_registry_client import (
         CachedSchemaRegistryClient,
     )
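
These two Python hunks only move import pyspark from the helpers' unconditional imports into the existing try block that collects modules required by specific tests, so environments without pyspark can still import the helpers. A minimal sketch of that optional-import pattern (the error handling here is illustrative, not the actual helper code):

try:
    # Modules required for specific tests only; they may be absent in
    # environments that never run those tests.
    import pyspark
except ImportError:
    pyspark = None  # tests that need it can skip when it is unavailable
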
@@ -191,3 +191,23 @@ def test_dependent_dict_table_distr(node):
     node.restart_clickhouse()

     query("DROP DATABASE IF EXISTS test_db;")
+
+
+def test_no_lazy_load():
+    node2.query("create database no_lazy")
+    node2.query(
+        "create table no_lazy.src (n int, m int) engine=MergeTree order by n partition by n % 100"
+    )
+    node2.query("insert into no_lazy.src select number, number from numbers(0, 99)")
+    node2.query("insert into no_lazy.src select number, number from numbers(100, 99)")
+    node2.query(
+        "create dictionary no_lazy.dict (n int, mm int) primary key n "
+        "source(clickhouse(query 'select n, m + sleepEachRow(0.1) as mm from no_lazy.src')) "
+        "lifetime(min 0 max 0) layout(complex_key_hashed_array(shards 10))"
+    )
+
+    node2.restart_clickhouse()
+
+    assert "42\n" == node2.query("select dictGet('no_lazy.dict', 'mm', 42)")
+
+    node2.query("drop database no_lazy")
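
The new test targets the scenario the guard protects: node2 presumably runs with dictionaries_lazy_load disabled (hence the test name), so after the restart no_lazy.dict is loaded eagerly during startup, and sleepEachRow(0.1) together with layout(complex_key_hashed_array(shards 10)) makes that load slow enough that a config-triggered reload could previously race with it; the dictGet check then verifies the dictionary still ends up fully and correctly loaded. A hedged sketch of how such an instance could be declared with the ClickHouse integration-test helpers (the config file name and fixture wiring are assumptions, not copied from the test file):

import pytest
from helpers.cluster import ClickHouseCluster

cluster = ClickHouseCluster(__file__)
node2 = cluster.add_instance(
    "node2",
    # Hypothetical config file that sets <dictionaries_lazy_load>false</dictionaries_lazy_load>.
    main_configs=["configs/no_lazy_load.xml"],
    stay_alive=True,  # needed for node2.restart_clickhouse()
)

@pytest.fixture(scope="module", autouse=True)
def started_cluster():
    try:
        cluster.start()
        yield cluster
    finally:
        cluster.shutdown()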