Merge branch 'master' into Issue_39395
commit 7c8ceead63

.github/workflows/master.yml
@@ -151,8 +151,8 @@ jobs:
 # shellcheck disable=SC2046
 docker rm -f $(docker ps -a -q) ||:
 sudo rm -fr "$TEMP_PATH"
-SplitBuildSmokeTest:
-needs: [BuilderDebSplitted]
+SharedBuildSmokeTest:
+needs: [BuilderDebShared]
 runs-on: [self-hosted, style-checker]
 steps:
 - name: Set envs
@@ -171,7 +171,7 @@ jobs:
 uses: actions/download-artifact@v2
 with:
 path: ${{ env.REPORTS_PATH }}
-- name: Split build check
+- name: Shared build check
 run: |
 sudo rm -fr "$TEMP_PATH"
 mkdir -p "$TEMP_PATH"
@@ -598,7 +598,7 @@ jobs:
 ##########################################################################################
 ##################################### SPECIAL BUILDS #####################################
 ##########################################################################################
-BuilderDebSplitted:
+BuilderDebShared:
 needs: [DockerHubPush]
 runs-on: [self-hosted, builder]
 steps:
@@ -609,7 +609,7 @@ jobs:
 IMAGES_PATH=${{runner.temp}}/images_path
 REPO_COPY=${{runner.temp}}/build_check/ClickHouse
 CACHES_PATH=${{runner.temp}}/../ccaches
-BUILD_NAME=binary_splitted
+BUILD_NAME=binary_shared
 EOF
 - name: Download changed images
 uses: actions/download-artifact@v2
@@ -1012,7 +1012,7 @@ jobs:
 # - BuilderBinGCC
 - BuilderBinPPC64
 - BuilderBinClangTidy
-- BuilderDebSplitted
+- BuilderDebShared
 runs-on: [self-hosted, style-checker]
 steps:
 - name: Set envs
@@ -3153,7 +3153,7 @@ jobs:
 - UnitTestsMsan
 - UnitTestsUBsan
 - UnitTestsReleaseClang
-- SplitBuildSmokeTest
+- SharedBuildSmokeTest
 runs-on: [self-hosted, style-checker]
 steps:
 - name: Clear repository

.github/workflows/pull_request.yml
@@ -216,8 +216,8 @@ jobs:
 # shellcheck disable=SC2046
 docker rm -f $(docker ps -a -q) ||:
 sudo rm -fr "$TEMP_PATH"
-SplitBuildSmokeTest:
-needs: [BuilderDebSplitted]
+SharedBuildSmokeTest:
+needs: [BuilderDebShared]
 runs-on: [self-hosted, style-checker]
 steps:
 - name: Set envs
@@ -236,7 +236,7 @@ jobs:
 uses: actions/download-artifact@v2
 with:
 path: ${{ env.REPORTS_PATH }}
-- name: Split build check
+- name: Shared build check
 run: |
 sudo rm -fr "$TEMP_PATH"
 mkdir -p "$TEMP_PATH"
@@ -620,7 +620,7 @@ jobs:
 ##########################################################################################
 ##################################### SPECIAL BUILDS #####################################
 ##########################################################################################
-BuilderDebSplitted:
+BuilderDebShared:
 needs: [DockerHubPush, FastTest, StyleCheck]
 runs-on: [self-hosted, builder]
 steps:
@@ -631,7 +631,7 @@ jobs:
 IMAGES_PATH=${{runner.temp}}/images_path
 REPO_COPY=${{runner.temp}}/build_check/ClickHouse
 CACHES_PATH=${{runner.temp}}/../ccaches
-BUILD_NAME=binary_splitted
+BUILD_NAME=binary_shared
 EOF
 - name: Download changed images
 uses: actions/download-artifact@v2
@@ -1024,7 +1024,7 @@ jobs:
 # - BuilderBinGCC
 - BuilderBinPPC64
 - BuilderBinClangTidy
-- BuilderDebSplitted
+- BuilderDebShared
 runs-on: [self-hosted, style-checker]
 if: ${{ success() || failure() }}
 steps:
@@ -3416,7 +3416,7 @@ jobs:
 - UnitTestsMsan
 - UnitTestsUBsan
 - UnitTestsReleaseClang
-- SplitBuildSmokeTest
+- SharedBuildSmokeTest
 - CompatibilityCheck
 - IntegrationTestsFlakyCheck
 - Jepsen

README.md
@@ -15,4 +15,4 @@ ClickHouse® is an open-source column-oriented database management system that a
 * [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

 ## Upcoming events
-* **v22.8 Release Webinar** Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
+* [**v22.8 Release Webinar**](https://clickhouse.com/company/events/v22-8-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.

contrib/arrow
@@ -1 +1 @@
-Subproject commit 3e03c6de41a86df2fc54a61e9be1abaefeff6b0e
+Subproject commit efdcd015cfdee1b6aa349c9ca227ca12c3d697f5
@@ -100,12 +100,12 @@ def run_docker_image_with_env(
 subprocess.check_call(cmd, shell=True)


-def is_release_build(build_type, package_type, sanitizer, split_binary):
+def is_release_build(build_type, package_type, sanitizer, shared_libraries):
 return (
 build_type == ""
 and package_type == "deb"
 and sanitizer == ""
-and not split_binary
+and not shared_libraries
 )


@@ -116,7 +116,7 @@ def parse_env_variables(
 package_type,
 cache,
 distcc_hosts,
-split_binary,
+shared_libraries,
 clang_tidy,
 version,
 author,
@@ -202,7 +202,7 @@ def parse_env_variables(
 cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr")
 cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc")
 cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
-if is_release_build(build_type, package_type, sanitizer, split_binary):
+if is_release_build(build_type, package_type, sanitizer, shared_libraries):
 cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
 result.append("WITH_PERFORMANCE=1")
 if is_cross_arm:
@@ -219,7 +219,7 @@ def parse_env_variables(
 if package_type == "coverity":
 result.append("COMBINED_OUTPUT=coverity")
 result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
-elif split_binary:
+elif shared_libraries:
 result.append("COMBINED_OUTPUT=shared_build")

 if sanitizer:
@@ -264,7 +264,7 @@ def parse_env_variables(
 result.append("BINARY_OUTPUT=tests")
 cmake_flags.append("-DENABLE_TESTS=1")

-if split_binary:
+if shared_libraries:
 cmake_flags.append(
 "-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1"
 )
@@ -351,7 +351,7 @@ if __name__ == "__main__":
 default="",
 )

-parser.add_argument("--split-binary", action="store_true")
+parser.add_argument("--shared-libraries", action="store_true")
 parser.add_argument("--clang-tidy", action="store_true")
 parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
 parser.add_argument(
@@ -404,7 +404,7 @@ if __name__ == "__main__":
 args.package_type,
 args.cache,
 args.distcc_hosts,
-args.split_binary,
+args.shared_libraries,
 args.clang_tidy,
 args.version,
 args.author,
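
As an aside, here is a minimal runnable sketch of what the renamed option does, based only on the hunks above; the `cmake_flags_for` helper and the hard-coded argument list are illustrative assumptions, not part of the packager script.

```python
# Illustrative sketch of the renamed --shared-libraries option; the predicate
# and cmake flags mirror the hunks above, everything else is made up.
import argparse


def is_release_build(build_type, package_type, sanitizer, shared_libraries):
    # Only a plain deb package without sanitizers and without shared
    # libraries counts as a release build.
    return (
        build_type == ""
        and package_type == "deb"
        and sanitizer == ""
        and not shared_libraries
    )


def cmake_flags_for(shared_libraries):
    # Same cmake flags the diff appends when building with shared libraries.
    if shared_libraries:
        return ["-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1"]
    return []


parser = argparse.ArgumentParser()
parser.add_argument("--shared-libraries", action="store_true")
args = parser.parse_args(["--shared-libraries"])

print(is_release_build("", "deb", "", args.shared_libraries))  # False
print(cmake_flags_for(args.shared_libraries))
```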

@@ -982,7 +982,7 @@ Default value: 2.
 **Example**

 ```xml
-<background_merges_mutations_concurrency_ratio>3</background_pbackground_merges_mutations_concurrency_ratio>
+<background_merges_mutations_concurrency_ratio>3</background_merges_mutations_concurrency_ratio>
 ```

 ## background_move_pool_size {#background_move_pool_size}

@@ -25,6 +25,10 @@
 M(WriteBufferFromFileDescriptorWrite, "Number of writes (write/pwrite) to a file descriptor. Does not include sockets.") \
 M(WriteBufferFromFileDescriptorWriteFailed, "Number of times the write (write/pwrite) to a file descriptor have failed.") \
 M(WriteBufferFromFileDescriptorWriteBytes, "Number of bytes written to file descriptors. If the file is compressed, this will show compressed data size.") \
+M(FileSync, "Number of times the F_FULLFSYNC/fsync/fdatasync function was called for files.") \
+M(DirectorySync, "Number of times the F_FULLFSYNC/fsync/fdatasync function was called for directories.") \
+M(FileSyncElapsedMicroseconds, "Total time spent waiting for F_FULLFSYNC/fsync/fdatasync syscall for files.") \
+M(DirectorySyncElapsedMicroseconds, "Total time spent waiting for F_FULLFSYNC/fsync/fdatasync syscall for directories.") \
 M(ReadCompressedBytes, "Number of bytes (the number of bytes before decompression) read from compressed sources (files, network).") \
 M(CompressedReadBufferBlocks, "Number of compressed blocks (the blocks of data that are compressed independent of each other) read from compressed sources (files, network).") \
 M(CompressedReadBufferBytes, "Number of uncompressed bytes (the number of bytes after decompression) read from compressed sources (files, network).") \
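
The four new counters are global ProfileEvents, so on a server built with this change they can be read back from the standard `system.events` table. A hedged example, assuming a local server and `clickhouse-client` on PATH (the query itself is not part of the diff):

```python
# Hedged example: read the new fsync-related counters from system.events.
# Assumes a local ClickHouse server and clickhouse-client on PATH.
import subprocess

QUERY = """
SELECT event, value
FROM system.events
WHERE event IN ('FileSync', 'DirectorySync',
                'FileSyncElapsedMicroseconds', 'DirectorySyncElapsedMicroseconds')
"""

result = subprocess.run(
    ["clickhouse-client", "--query", QUERY],
    capture_output=True, text=True, check=True,
)
print(result.stdout)
```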

@@ -34,6 +34,8 @@ namespace ProfileEvents
 extern const Event AIOWriteBytes;
 extern const Event AIORead;
 extern const Event AIOReadBytes;
+extern const Event FileSync;
+extern const Event FileSyncElapsedMicroseconds;
 }

 namespace DB
@@ -544,6 +546,9 @@ public:
 file_path,
 std::to_string(bytes_written));

+ProfileEvents::increment(ProfileEvents::FileSync);
+
+Stopwatch watch;
 #if defined(OS_DARWIN)
 if (::fsync(file.fd) < 0)
 throwFromErrnoWithPath("Cannot fsync " + file_path, file_path, ErrorCodes::CANNOT_FSYNC);
@@ -551,6 +556,7 @@ public:
 if (::fdatasync(file.fd) < 0)
 throwFromErrnoWithPath("Cannot fdatasync " + file_path, file_path, ErrorCodes::CANNOT_FSYNC);
 #endif
+ProfileEvents::increment(ProfileEvents::FileSyncElapsedMicroseconds, watch.elapsedMicroseconds());

 current_block_index += buffer_size_in_blocks;


@@ -1,6 +1,8 @@
 #include <Disks/LocalDirectorySyncGuard.h>
+#include <Common/ProfileEvents.h>
 #include <Common/Exception.h>
 #include <Disks/IDisk.h>
+#include <Common/Stopwatch.h>
 #include <fcntl.h> // O_RDWR

 /// OSX does not have O_DIRECTORY
@@ -8,6 +10,12 @@
 #define O_DIRECTORY O_RDWR
 #endif

+namespace ProfileEvents
+{
+extern const Event DirectorySync;
+extern const Event DirectorySyncElapsedMicroseconds;
+}
+
 namespace DB
 {

@@ -29,8 +37,12 @@ LocalDirectorySyncGuard::LocalDirectorySyncGuard(const String & full_path)

 LocalDirectorySyncGuard::~LocalDirectorySyncGuard()
 {
+ProfileEvents::increment(ProfileEvents::DirectorySync);
+
 try
 {
+Stopwatch watch;
+
 #if defined(OS_DARWIN)
 if (fcntl(fd, F_FULLFSYNC, 0))
 throwFromErrno("Cannot fcntl(F_FULLFSYNC)", ErrorCodes::CANNOT_FSYNC);
@@ -40,6 +52,8 @@ LocalDirectorySyncGuard::~LocalDirectorySyncGuard()
 #endif
 if (-1 == ::close(fd))
 throw Exception("Cannot close file", ErrorCodes::CANNOT_CLOSE_FILE);
+
+ProfileEvents::increment(ProfileEvents::DirectorySyncElapsedMicroseconds, watch.elapsedMicroseconds());
 }
 catch (...)
 {
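
For readers unfamiliar with the guard being instrumented here, this is a rough Python analogue of the pattern (open the directory, fsync it when the scope ends); it is illustrative only and not ClickHouse code:

```python
# Illustrative analogue of a directory sync guard: fsync the directory on
# scope exit so the directory entry of a freshly written file is durable.
import os
from contextlib import contextmanager


@contextmanager
def directory_sync_guard(path):
    fd = os.open(path, os.O_DIRECTORY)
    try:
        yield
    finally:
        os.fsync(fd)  # plays the role of F_FULLFSYNC/fsync in the destructor
        os.close(fd)


with directory_sync_guard("/tmp"):
    with open("/tmp/guarded_file", "wb") as f:
        f.write(b"data")
        os.fsync(f.fileno())
```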

@@ -18,6 +18,8 @@ namespace ProfileEvents
 extern const Event WriteBufferFromFileDescriptorWriteFailed;
 extern const Event WriteBufferFromFileDescriptorWriteBytes;
 extern const Event DiskWriteElapsedMicroseconds;
+extern const Event FileSync;
+extern const Event FileSyncElapsedMicroseconds;
 }

 namespace CurrentMetrics
@@ -113,12 +115,18 @@ void WriteBufferFromFileDescriptor::sync()
 /// If buffer has pending data - write it.
 next();

+ProfileEvents::increment(ProfileEvents::FileSync);
+
+Stopwatch watch;
+
 /// Request OS to sync data with storage medium.
 #if defined(OS_DARWIN)
 int res = ::fsync(fd);
 #else
 int res = ::fdatasync(fd);
 #endif
+ProfileEvents::increment(ProfileEvents::FileSyncElapsedMicroseconds, watch.elapsedMicroseconds());

 if (-1 == res)
 throwFromErrnoWithPath("Cannot fsync " + getFileName(), getFileName(), ErrorCodes::CANNOT_FSYNC);
 }
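
The same count-then-time pattern, sketched in Python for illustration; the counter names are copied from the diff, the rest is made up, and on macOS `os.fdatasync` is unavailable, matching the `OS_DARWIN` branch above.

```python
# Illustrative only: count the sync call, time the syscall, record microseconds.
import os
import sys
import time

counters = {"FileSync": 0, "FileSyncElapsedMicroseconds": 0}


def sync_fd(fd):
    counters["FileSync"] += 1
    start = time.monotonic()
    if sys.platform == "darwin":
        os.fsync(fd)      # macOS: no fdatasync, fall back to fsync
    else:
        os.fdatasync(fd)  # Linux: skip flushing unchanged metadata
    counters["FileSyncElapsedMicroseconds"] += int((time.monotonic() - start) * 1e6)


with open("/tmp/example.bin", "wb") as f:
    f.write(b"payload")
    f.flush()
    sync_fd(f.fileno())

print(counters)
```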

@@ -39,7 +39,7 @@ def _can_export_binaries(build_config: BuildConfig) -> bool:
 return False
 if build_config["bundled"] != "bundled":
 return False
-if build_config["splitted"] == "splitted":
+if build_config["libraries"] == "shared":
 return False
 if build_config["sanitizer"] != "":
 return True
@@ -68,8 +68,8 @@ def get_packager_cmd(
 cmd += f" --build-type={build_config['build_type']}"
 if build_config["sanitizer"]:
 cmd += f" --sanitizer={build_config['sanitizer']}"
-if build_config["splitted"] == "splitted":
-cmd += " --split-binary"
+if build_config["libraries"] == "shared":
+cmd += " --shared-libraries"
 if build_config["tidy"] == "enable":
 cmd += " --clang-tidy"

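
A reduced sketch of how the renamed config key turns into the packager flag; the dict shape follows the ci_config.py entries further down, while `base_cmd` and the trimmed option list are assumptions made for the example.

```python
# Sketch only: maps "libraries": "shared" to the renamed --shared-libraries flag.
def get_packager_cmd_sketch(build_config, base_cmd="./packager"):
    cmd = base_cmd
    if build_config["sanitizer"]:
        cmd += f" --sanitizer={build_config['sanitizer']}"
    if build_config["libraries"] == "shared":
        cmd += " --shared-libraries"
    if build_config["tidy"] == "enable":
        cmd += " --clang-tidy"
    return cmd


print(get_packager_cmd_sketch(
    {"sanitizer": "", "libraries": "shared", "tidy": "disable"}
))
# ./packager --shared-libraries
```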

@@ -37,7 +37,7 @@ class BuildResult:
 build_type,
 sanitizer,
 bundled,
-splitted,
+libraries,
 status,
 elapsed_seconds,
 with_coverage,
@@ -46,7 +46,7 @@ class BuildResult:
 self.build_type = build_type
 self.sanitizer = sanitizer
 self.bundled = bundled
-self.splitted = splitted
+self.libraries = libraries
 self.status = status
 self.elapsed_seconds = elapsed_seconds
 self.with_coverage = with_coverage
@@ -91,7 +91,7 @@ def get_failed_report(
 build_type="unknown",
 sanitizer="unknown",
 bundled="unknown",
-splitted="unknown",
+libraries="unknown",
 status=message,
 elapsed_seconds=0,
 with_coverage=False,
@@ -108,7 +108,7 @@ def process_report(
 build_type=build_config["build_type"],
 sanitizer=build_config["sanitizer"],
 bundled=build_config["bundled"],
-splitted=build_config["splitted"],
+libraries=build_config["libraries"],
 status="success" if build_report["status"] else "failure",
 elapsed_seconds=build_report["elapsed_seconds"],
 with_coverage=False,

@@ -14,7 +14,7 @@ CI_CONFIG = {
 "package_type": "deb",
 "static_binary_name": "amd64",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "additional_pkgs": True,
 "tidy": "disable",
 "with_coverage": False,
@@ -25,7 +25,7 @@ CI_CONFIG = {
 "sanitizer": "",
 "package_type": "coverity",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 "official": False,
@@ -37,7 +37,7 @@ CI_CONFIG = {
 "package_type": "deb",
 "static_binary_name": "aarch64",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "additional_pkgs": True,
 "tidy": "disable",
 "with_coverage": False,
@@ -48,7 +48,7 @@ CI_CONFIG = {
 "sanitizer": "address",
 "package_type": "deb",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -58,7 +58,7 @@ CI_CONFIG = {
 "sanitizer": "undefined",
 "package_type": "deb",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -68,7 +68,7 @@ CI_CONFIG = {
 "sanitizer": "thread",
 "package_type": "deb",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -78,7 +78,7 @@ CI_CONFIG = {
 "sanitizer": "memory",
 "package_type": "deb",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -88,7 +88,7 @@ CI_CONFIG = {
 "sanitizer": "",
 "package_type": "deb",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -98,7 +98,7 @@ CI_CONFIG = {
 "sanitizer": "",
 "package_type": "binary",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -109,17 +109,17 @@ CI_CONFIG = {
 "package_type": "binary",
 "static_binary_name": "debug-amd64",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "enable",
 "with_coverage": False,
 },
-"binary_splitted": {
+"binary_shared": {
 "compiler": "clang-14",
 "build_type": "",
 "sanitizer": "",
 "package_type": "binary",
 "bundled": "bundled",
-"splitted": "splitted",
+"libraries": "shared",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -130,7 +130,7 @@ CI_CONFIG = {
 "package_type": "binary",
 "static_binary_name": "macos",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -140,7 +140,7 @@ CI_CONFIG = {
 "sanitizer": "",
 "package_type": "binary",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -151,7 +151,7 @@ CI_CONFIG = {
 "package_type": "binary",
 "static_binary_name": "freebsd",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -162,7 +162,7 @@ CI_CONFIG = {
 "package_type": "binary",
 "static_binary_name": "macos-aarch64",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -173,7 +173,7 @@ CI_CONFIG = {
 "package_type": "binary",
 "static_binary_name": "powerpc64le",
 "bundled": "bundled",
-"splitted": "unsplitted",
+"libraries": "static",
 "tidy": "disable",
 "with_coverage": False,
 },
@@ -192,7 +192,7 @@ CI_CONFIG = {
 ],
 "ClickHouse special build check": [
 "binary_tidy",
-"binary_splitted",
+"binary_shared",
 "binary_darwin",
 "binary_aarch64",
 "binary_freebsd",
@@ -297,7 +297,7 @@ CI_CONFIG = {
 "required_build": "package_release",
 },
 "Split build smoke test": {
-"required_build": "binary_splitted",
+"required_build": "binary_shared",
 },
 "Unit tests (release-clang)": {
 "required_build": "binary_release",
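
How the renamed build name is looked up by a check, shown as a self-contained sketch; the `tests_config` nesting is an assumption about the surrounding CI_CONFIG structure, while the names themselves come from the hunks above.

```python
# Self-contained sketch; only the names "binary_shared" and
# "Split build smoke test" -> "required_build" come from the diff.
CI_CONFIG_SKETCH = {
    "build_config": {
        "binary_shared": {"compiler": "clang-14", "libraries": "shared"},
    },
    "tests_config": {
        "Split build smoke test": {"required_build": "binary_shared"},
    },
}


def required_build(check_name):
    return CI_CONFIG_SKETCH["tests_config"][check_name]["required_build"]


print(required_build("Split build smoke test"))  # binary_shared
```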

@@ -290,7 +290,7 @@ tr:hover td {{filter: brightness(95%);}}
 <th>Build type</th>
 <th>Sanitizer</th>
 <th>Bundled</th>
-<th>Splitted</th>
+<th>Libraries</th>
 <th>Status</th>
 <th>Build log</th>
 <th>Build time</th>
@@ -335,7 +335,7 @@ def create_build_html_report(
 row += "<td>{}</td>".format("none")

 row += "<td>{}</td>".format(build_result.bundled)
-row += "<td>{}</td>".format(build_result.splitted)
+row += "<td>{}</td>".format(build_result.libraries)

 if build_result.status:
 style = _get_status_style(build_result.status)

@@ -46,5 +46,19 @@
 "test_storage_s3/test.py::test_url_reconnect_in_the_middle",
 "test_system_metrics/test.py::test_readonly_metrics",
 "test_system_replicated_fetches/test.py::test_system_replicated_fetches",
-"test_zookeeper_config_load_balancing/test.py::test_round_robin"
+"test_zookeeper_config_load_balancing/test.py::test_round_robin",
+
+"test_tlsv1_3/test.py::test_https",
+"test_tlsv1_3/test.py::test_https_wrong_cert",
+"test_tlsv1_3/test.py::test_https_non_ssl_auth",
+"test_tlsv1_3/test.py::test_create_user",
+"test_user_ip_restrictions/test.py::test_ipv4",
+"test_user_ip_restrictions/test.py::test_ipv6",
+"test_ssl_cert_authentication/test.py::test_https",
+"test_ssl_cert_authentication/test.py::test_https_wrong_cert",
+"test_ssl_cert_authentication/test.py::test_https_non_ssl_auth",
+"test_ssl_cert_authentication/test.py::test_create_user",
+"test_grpc_protocol_ssl/test.py::test_secure_channel",
+"test_grpc_protocol_ssl/test.py::test_insecure_channel",
+"test_grpc_protocol_ssl/test.py::test_wrong_client_certificate"
 ]

@@ -5,7 +5,8 @@ import grpc
 from helpers.cluster import ClickHouseCluster, run_and_check

 GRPC_PORT = 9100
-NODE_IP = "10.5.172.77"  # It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+# It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+NODE_IP = "10.5.172.77"  # Never copy-paste this line
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 DEFAULT_ENCODING = "utf-8"


@@ -5,7 +5,8 @@ import ssl
 import os.path

 HTTPS_PORT = 8443
-NODE_IP = "10.5.172.77"  # It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+# It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+NODE_IP = "10.5.172.77"  # Never copy-paste this line
 NODE_IP_WITH_HTTPS_PORT = NODE_IP + ":" + str(HTTPS_PORT)
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))


@@ -5,7 +5,8 @@ import ssl
 import os.path

 HTTPS_PORT = 8443
-NODE_IP = "10.5.172.77"  # It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+# It's important for the node to work at this IP because 'server-cert.pem' requires that (see server-ext.cnf).
+NODE_IP = "10.5.172.77"  # Never copy-paste this line
 NODE_IP_WITH_HTTPS_PORT = NODE_IP + ":" + str(HTTPS_PORT)
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))


@@ -8,47 +8,56 @@ node_ipv4 = cluster.add_instance(
 "node_ipv4",
 main_configs=[],
 user_configs=["configs/users_ipv4.xml"],
-ipv4_address="10.5.172.77",
+ipv4_address="10.5.172.77",  # Never copy-paste this line
 )
 client_ipv4_ok = cluster.add_instance(
-"client_ipv4_ok", main_configs=[], user_configs=[], ipv4_address="10.5.172.10"
+"client_ipv4_ok",
+main_configs=[],
+user_configs=[],
+ipv4_address="10.5.172.10",  # Never copy-paste this line
 )
 client_ipv4_ok_direct = cluster.add_instance(
-"client_ipv4_ok_direct", main_configs=[], user_configs=[], ipv4_address="10.5.173.1"
+"client_ipv4_ok_direct",
+main_configs=[],
+user_configs=[],
+ipv4_address="10.5.173.1",  # Never copy-paste this line
 )
 client_ipv4_ok_full_mask = cluster.add_instance(
 "client_ipv4_ok_full_mask",
 main_configs=[],
 user_configs=[],
-ipv4_address="10.5.175.77",
+ipv4_address="10.5.175.77",  # Never copy-paste this line
 )
 client_ipv4_bad = cluster.add_instance(
-"client_ipv4_bad", main_configs=[], user_configs=[], ipv4_address="10.5.173.10"
+"client_ipv4_bad",
+main_configs=[],
+user_configs=[],
+ipv4_address="10.5.173.10",  # Never copy-paste this line
 )

 node_ipv6 = cluster.add_instance(
 "node_ipv6",
 main_configs=["configs/config_ipv6.xml"],
 user_configs=["configs/users_ipv6.xml"],
-ipv6_address="2001:3984:3989::1:1000",
+ipv6_address="2001:3984:3989::1:1000",  # Never copy-paste this line
 )
 client_ipv6_ok = cluster.add_instance(
 "client_ipv6_ok",
 main_configs=[],
 user_configs=[],
-ipv6_address="2001:3984:3989::5555",
+ipv6_address="2001:3984:3989::5555",  # Never copy-paste this line
 )
 client_ipv6_ok_direct = cluster.add_instance(
 "client_ipv6_ok_direct",
 main_configs=[],
 user_configs=[],
-ipv6_address="2001:3984:3989::1:1111",
+ipv6_address="2001:3984:3989::1:1111",  # Never copy-paste this line
 )
 client_ipv6_bad = cluster.add_instance(
 "client_ipv6_bad",
 main_configs=[],
 user_configs=[],
-ipv6_address="2001:3984:3989::1:1112",
+ipv6_address="2001:3984:3989::1:1112",  # Never copy-paste this line
 )


@@ -85,11 +85,11 @@ select distinct 1 as x, 2 as y from distinct_in_order order by x;
 1 2
 select distinct 1 as x, 2 as y from distinct_in_order order by x, y;
 1 2
-select distinct a, 1 as x from distinct_in_order order by x;
+select a, x from (select distinct a, 1 as x from distinct_in_order order by x) order by a;
 0 1
 select distinct a, 1 as x, 2 as y from distinct_in_order order by a;
 0 1 2
-select distinct a, b, 1 as x, 2 as y from distinct_in_order order by a;
+select a, b, x, y from(select distinct a, b, 1 as x, 2 as y from distinct_in_order order by a) order by a, b;
 0 0 1 2
 0 1 1 2
 0 2 1 2
@@ -97,10 +97,10 @@ select distinct a, b, 1 as x, 2 as y from distinct_in_order order by a;
 0 4 1 2
 select distinct x, y from (select 1 as x, 2 as y from distinct_in_order order by x) order by y;
 1 2
-select distinct a, b, x, y from (select a, b, 1 as x, 2 as y from distinct_in_order order by a) order by b;
+select distinct a, b, x, y from (select a, b, 1 as x, 2 as y from distinct_in_order order by a) order by a, b;
 0 0 1 2
 0 1 1 2
 0 2 1 2
 0 3 1 2
 0 4 1 2
--- check that distinct in order has the same result as ordinary distinct
+-- check that distinct in order returns the same result as ordinary distinct

@@ -48,16 +48,16 @@ select '-- distinct with constants columns';
 select distinct 1 as x, 2 as y from distinct_in_order;
 select distinct 1 as x, 2 as y from distinct_in_order order by x;
 select distinct 1 as x, 2 as y from distinct_in_order order by x, y;
-select distinct a, 1 as x from distinct_in_order order by x;
+select a, x from (select distinct a, 1 as x from distinct_in_order order by x) order by a;
 select distinct a, 1 as x, 2 as y from distinct_in_order order by a;
-select distinct a, b, 1 as x, 2 as y from distinct_in_order order by a;
+select a, b, x, y from(select distinct a, b, 1 as x, 2 as y from distinct_in_order order by a) order by a, b;
 select distinct x, y from (select 1 as x, 2 as y from distinct_in_order order by x) order by y;
-select distinct a, b, x, y from (select a, b, 1 as x, 2 as y from distinct_in_order order by a) order by b;
+select distinct a, b, x, y from (select a, b, 1 as x, 2 as y from distinct_in_order order by a) order by a, b;
 -- { echoOff }

 drop table if exists distinct_in_order sync;

-select '-- check that distinct in order has the same result as ordinary distinct';
+select '-- check that distinct in order returns the same result as ordinary distinct';
 drop table if exists distinct_cardinality_low sync;
 CREATE TABLE distinct_cardinality_low (low UInt64, medium UInt64, high UInt64) ENGINE MergeTree() ORDER BY (low, medium);
 INSERT INTO distinct_cardinality_low SELECT number % 1e1, number % 1e2, number % 1e3 FROM numbers_mt(1e4);

tests/queries/0_stateless/02361_fsync_profile_events.sh (new executable file)
@@ -0,0 +1,73 @@
+#!/usr/bin/env bash
+# Tags: no-s3-storage
+# Tag no-s3-storage: s3 does not have fsync
+
+CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+# shellcheck source=../shell_config.sh
+. "$CUR_DIR"/../shell_config.sh
+
+$CLICKHOUSE_CLIENT -nm -q "
+drop table if exists data_fsync_pe;
+
+create table data_fsync_pe (key Int) engine=MergeTree()
+order by key
+settings
+min_rows_for_wide_part=2,
+fsync_after_insert=1,
+fsync_part_directory=1;
+"
+
+ret=1
+# Retry in case of fsync/fdatasync was too fast
+# (FileSyncElapsedMicroseconds/DirectorySyncElapsedMicroseconds was 0)
+for i in {1..100}; do
+query_id="insert-$i-$CLICKHOUSE_DATABASE"
+
+$CLICKHOUSE_CLIENT --query_id "$query_id" -q "insert into data_fsync_pe values (1)"
+
+read -r FileSync FileOpen DirectorySync FileSyncElapsedMicroseconds DirectorySyncElapsedMicroseconds <<<"$(
+$CLICKHOUSE_CLIENT -nm --param_query_id "$query_id" -q "
+system flush logs;
+
+select
+ProfileEvents['FileSync'],
+ProfileEvents['FileOpen'],
+ProfileEvents['DirectorySync'],
+ProfileEvents['FileSyncElapsedMicroseconds']>0,
+ProfileEvents['DirectorySyncElapsedMicroseconds']>0
+from system.query_log
+where
+event_date >= yesterday() and
+current_database = currentDatabase() and
+query_id = {query_id:String} and
+type = 'QueryFinish';
+")"
+
+# Non retriable errors
+if [[ $FileSync -ne 7 ]]; then
+exit 2
+fi
+# Check that all files was synced
+if [[ $FileSync -ne $FileOpen ]]; then
+exit 3
+fi
+if [[ $DirectorySync -ne 2 ]]; then
+exit 4
+fi
+
+# Retriable errors
+if [[ $FileSyncElapsedMicroseconds -eq 0 ]]; then
+continue
+fi
+if [[ $DirectorySyncElapsedMicroseconds -eq 0 ]]; then
+continue
+fi
+
+# Everything is OK
+ret=0
+break
+done
+
+$CLICKHOUSE_CLIENT -q "drop table data_fsync_pe"
+
+exit $ret