Merge branch 'master' into dt64-neg-subseconds

Commit 12bd2c0e3f
@@ -62,6 +62,7 @@ Checks: '*,
     -google-build-using-namespace,
     -google-readability-braces-around-statements,
     -google-readability-casting,
     -google-readability-function-size,
     -google-readability-namespace-comments,
     -google-readability-todo,
.github/workflows/master.yml (vendored, 72 lines changed)
@@ -215,8 +215,8 @@ jobs:
           fetch-depth: 0 # For a proper version and performance artifacts
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -259,8 +259,8 @@ jobs:
           fetch-depth: 0 # For a proper version and performance artifacts
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -305,8 +305,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -350,8 +350,8 @@ jobs:
 #      uses: actions/checkout@v2
 #    - name: Build
 #      run: |
-#        git -C "$GITHUB_WORKSPACE" submodule sync
-#        git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+#        git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+#        git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
 #        sudo rm -fr "$TEMP_PATH"
 #        mkdir -p "$TEMP_PATH"
 #        cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -395,8 +395,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -440,8 +440,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -485,8 +485,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -530,8 +530,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -575,8 +575,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -623,8 +623,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -668,8 +668,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -715,8 +715,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -762,8 +762,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -809,8 +809,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -856,8 +856,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -903,8 +903,8 @@ jobs:
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -2911,7 +2911,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=0
           RUN_BY_HASH_TOTAL=4
@@ -2949,7 +2949,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=1
           RUN_BY_HASH_TOTAL=4
@@ -2987,7 +2987,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=2
           RUN_BY_HASH_TOTAL=4
@@ -3025,7 +3025,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=3
           RUN_BY_HASH_TOTAL=4
.github/workflows/nightly.yml (vendored, 7 lines changed)
@@ -81,7 +81,6 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           BUILD_NAME=coverity
           CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
           IMAGES_PATH=${{runner.temp}}/images_path
           REPO_COPY=${{runner.temp}}/build_check/ClickHouse
           TEMP_PATH=${{runner.temp}}/build_check
@@ -99,13 +98,15 @@ jobs:
         id: coverity-checkout
         uses: actions/checkout@v2
         with:
           submodules: 'true'
           fetch-depth: 0 # otherwise we will have no info about contributors
       - name: Build
         run: |
           git -C "$GITHUB_WORKSPACE" submodule sync
           git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
+          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
       - name: Upload Coverity Analysis
         if: ${{ success() || failure() }}
         run: |
.github/workflows/pull_request.yml (vendored, 228 lines changed)
@@ -277,8 +277,8 @@ jobs:
           fetch-depth: 0 # for performance artifact
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -322,8 +322,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -367,8 +367,8 @@ jobs:
 #      uses: actions/checkout@v2
 #    - name: Build
 #      run: |
-#        git -C "$GITHUB_WORKSPACE" submodule sync
-#        git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+#        git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+#        git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
 #        sudo rm -fr "$TEMP_PATH"
 #        mkdir -p "$TEMP_PATH"
 #        cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -414,8 +414,8 @@ jobs:
           fetch-depth: 0 # for performance artifact
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -459,8 +459,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -504,8 +504,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -549,8 +549,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -594,8 +594,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -639,8 +639,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -687,8 +687,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -732,8 +732,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -777,8 +777,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -822,8 +822,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -867,8 +867,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -912,8 +912,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -957,8 +957,8 @@ jobs:
         uses: actions/checkout@v2
       - name: Build
         run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
+          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -3127,7 +3127,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=0
           RUN_BY_HASH_TOTAL=4
@@ -3165,7 +3165,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=1
           RUN_BY_HASH_TOTAL=4
@@ -3203,7 +3203,7 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=2
           RUN_BY_HASH_TOTAL=4
@@ -3241,7 +3241,159 @@ jobs:
           cat >> "$GITHUB_ENV" << 'EOF'
           TEMP_PATH=${{runner.temp}}/performance_comparison
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Performance Comparison (actions)
+          CHECK_NAME=Performance Comparison
           REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
           RUN_BY_HASH_NUM=3
           RUN_BY_HASH_TOTAL=4
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Performance Comparison
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 performance_comparison_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          # shellcheck disable=SC2046
+          docker kill $(docker ps -q) ||:
+          # shellcheck disable=SC2046
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr "$TEMP_PATH"
+  PerformanceComparisonAarch0:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/performance_comparison
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Performance Comparison Aarch64
+          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
+          RUN_BY_HASH_NUM=0
+          RUN_BY_HASH_TOTAL=4
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Performance Comparison
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 performance_comparison_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          # shellcheck disable=SC2046
+          docker kill $(docker ps -q) ||:
+          # shellcheck disable=SC2046
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr "$TEMP_PATH"
+  PerformanceComparisonAarch1:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/performance_comparison
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Performance Comparison Aarch64
+          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
+          RUN_BY_HASH_NUM=1
+          RUN_BY_HASH_TOTAL=4
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Performance Comparison
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 performance_comparison_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          # shellcheck disable=SC2046
+          docker kill $(docker ps -q) ||:
+          # shellcheck disable=SC2046
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr "$TEMP_PATH"
+  PerformanceComparisonAarch2:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/performance_comparison
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Performance Comparison Aarch64
+          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
+          RUN_BY_HASH_NUM=2
+          RUN_BY_HASH_TOTAL=4
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Clear repository
+        run: |
+          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Performance Comparison
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 performance_comparison_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          # shellcheck disable=SC2046
+          docker kill $(docker ps -q) ||:
+          # shellcheck disable=SC2046
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr "$TEMP_PATH"
+  PerformanceComparisonAarch3:
+    needs: [BuilderDebAarch64]
+    runs-on: [self-hosted, func-tester-aarch64]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/performance_comparison
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Performance Comparison Aarch64
+          REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse
+          RUN_BY_HASH_NUM=3
+          RUN_BY_HASH_TOTAL=4
@@ -3333,6 +3485,10 @@ jobs:
       - PerformanceComparison1
       - PerformanceComparison2
      - PerformanceComparison3
+      - PerformanceComparisonAarch0
+      - PerformanceComparisonAarch1
+      - PerformanceComparisonAarch2
+      - PerformanceComparisonAarch3
       - UnitTestsAsan
       - UnitTestsTsan
       - UnitTestsMsan
.gitmodules (vendored, 6 lines changed)
@@ -79,10 +79,10 @@
 	url = https://github.com/ClickHouse/snappy.git
 [submodule "contrib/cppkafka"]
 	path = contrib/cppkafka
-	url = https://github.com/ClickHouse/cppkafka.git
+	url = https://github.com/mfontanini/cppkafka.git
 [submodule "contrib/brotli"]
 	path = contrib/brotli
-	url = https://github.com/ClickHouse/brotli.git
+	url = https://github.com/google/brotli.git
 [submodule "contrib/h3"]
 	path = contrib/h3
 	url = https://github.com/ClickHouse/h3
@@ -144,7 +144,7 @@
 	ignore = untracked
 [submodule "contrib/msgpack-c"]
 	path = contrib/msgpack-c
-	url = https://github.com/ClickHouse/msgpack-c
+	url = https://github.com/msgpack/msgpack-c
 [submodule "contrib/libcpuid"]
 	path = contrib/libcpuid
 	url = https://github.com/ClickHouse/libcpuid.git
@@ -13,9 +13,7 @@ max-statements=200
 ignore-long-lines = (# )?<?https?://\S+>?$

 [MESSAGES CONTROL]
-disable = bad-continuation,
-          missing-docstring,
-          bad-whitespace,
+disable = missing-docstring,
           too-few-public-methods,
           invalid-name,
           too-many-arguments,

@@ -4,7 +4,7 @@
 **[ClickHouse release v22.3-lts, 2022-03-17](#223)**<br>
 **[ClickHouse release v22.2, 2022-02-17](#222)**<br>
 **[ClickHouse release v22.1, 2022-01-18](#221)**<br>
-**[Changelog for 2021](https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/whats-new/changelog/2021.md)**<br>
+**[Changelog for 2021](https://clickhouse.com/docs/en/whats-new/changelog/2021/)**<br>

 ### <a id="225"></a> ClickHouse release 22.5, 2022-05-19

@@ -172,7 +172,7 @@

 #### Backward Incompatible Change

-* Do not allow SETTINGS after FORMAT for INSERT queries (there is compatibility setting `parser_settings_after_format_compact` to accept such queries, but it is turned OFF by default). [#35883](https://github.com/ClickHouse/ClickHouse/pull/35883) ([Azat Khuzhin](https://github.com/azat)).
+* Do not allow SETTINGS after FORMAT for INSERT queries (there is compatibility setting `allow_settings_after_format_in_insert` to accept such queries, but it is turned OFF by default). [#35883](https://github.com/ClickHouse/ClickHouse/pull/35883) ([Azat Khuzhin](https://github.com/azat)).
 * Function `yandexConsistentHash` (consistent hashing algorithm by Konstantin "kostik" Oblakov) is renamed to `kostikConsistentHash`. The old name is left as an alias for compatibility. Although this change is backward compatible, we may remove the alias in subsequent releases, that's why it's recommended to update the usages of this function in your apps. [#35553](https://github.com/ClickHouse/ClickHouse/pull/35553) ([Alexey Milovidov](https://github.com/alexey-milovidov)).

 #### New Feature

@@ -801,4 +801,4 @@
 * Fix hang up with command `DROP TABLE system.query_log sync`. [#33293](https://github.com/ClickHouse/ClickHouse/pull/33293) ([zhanghuajie](https://github.com/zhanghuajieHIT)).


-## [Changelog for 2021](https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/whats-new/changelog/2021.md)
+## [Changelog for 2021](https://clickhouse.com/docs/en/whats-new/changelog/2021)

@@ -36,7 +36,7 @@ set_property(GLOBAL PROPERTY USE_FOLDERS ON)

 # Check that submodules are present
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sysroot/README.md")
-    message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init --recursive")
+    message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init")
 endif ()

 # Take care to add prlimit in command line before ccache, or else ccache thinks that
@@ -15,25 +15,33 @@
 if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
     find_program(NINJA_PATH ninja)
     if (NINJA_PATH)
-        set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
+        set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "")
     endif ()
 endif()

 # Check if environment is polluted.
-if (NOT $ENV{CFLAGS} STREQUAL ""
-    OR NOT $ENV{CXXFLAGS} STREQUAL ""
-    OR NOT $ENV{LDFLAGS} STREQUAL ""
+if (NOT "$ENV{CFLAGS}" STREQUAL ""
+    OR NOT "$ENV{CXXFLAGS}" STREQUAL ""
+    OR NOT "$ENV{LDFLAGS}" STREQUAL ""
     OR CMAKE_C_FLAGS OR CMAKE_CXX_FLAGS OR CMAKE_EXE_LINKER_FLAGS OR CMAKE_SHARED_LINKER_FLAGS OR CMAKE_MODULE_LINKER_FLAGS
     OR CMAKE_C_FLAGS_INIT OR CMAKE_CXX_FLAGS_INIT OR CMAKE_EXE_LINKER_FLAGS_INIT OR CMAKE_SHARED_LINKER_FLAGS_INIT OR CMAKE_MODULE_LINKER_FLAGS_INIT)

     # if $ENV
     message("CFLAGS: $ENV{CFLAGS}")
     message("CXXFLAGS: $ENV{CXXFLAGS}")
     message("LDFLAGS: $ENV{LDFLAGS}")
     # if *_FLAGS
     message("CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}")
     message("CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
     message("CMAKE_EXE_LINKER_FLAGS: ${CMAKE_EXE_LINKER_FLAGS}")
     message("CMAKE_SHARED_LINKER_FLAGS: ${CMAKE_SHARED_LINKER_FLAGS}")
     message("CMAKE_MODULE_LINKER_FLAGS: ${CMAKE_MODULE_LINKER_FLAGS}")
     # if *_FLAGS_INIT
     message("CMAKE_C_FLAGS_INIT: ${CMAKE_C_FLAGS_INIT}")
     message("CMAKE_CXX_FLAGS_INIT: ${CMAKE_CXX_FLAGS_INIT}")
     message("CMAKE_EXE_LINKER_FLAGS_INIT: ${CMAKE_EXE_LINKER_FLAGS_INIT}")
     message("CMAKE_SHARED_LINKER_FLAGS_INIT: ${CMAKE_SHARED_LINKER_FLAGS_INIT}")
     message("CMAKE_MODULE_LINKER_FLAGS_INIT: ${CMAKE_MODULE_LINKER_FLAGS_INIT}")

     message(FATAL_ERROR "
 Some of the variables like CFLAGS, CXXFLAGS, LDFLAGS are not empty.
@@ -51,17 +59,34 @@ endif()
 execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
 execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)

+# By default, prefer clang on Linux
+# But note, that you still may change the compiler with -DCMAKE_C_COMPILER/-DCMAKE_CXX_COMPILER.
+if (OS MATCHES "Linux"
+    # some build systems may use CC/CXX env variables
+    AND "$ENV{CC}" STREQUAL ""
+    AND "$ENV{CXX}" STREQUAL "")
+    find_program(CLANG_PATH clang)
+    if (CLANG_PATH)
+        set(CMAKE_C_COMPILER "clang" CACHE INTERNAL "")
+    endif()
+
+    find_program(CLANG_CXX_PATH clang++)
+    if (CLANG_CXX_PATH)
+        set(CMAKE_CXX_COMPILER "clang++" CACHE INTERNAL "")
+    endif()
+endif()
+
 if (OS MATCHES "Linux"
     AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
     AND NOT DISABLE_HERMETIC_BUILD
-    AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*"))
+    AND ("$ENV{CC}" MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*"))

     if (ARCH MATCHES "amd64|x86_64")
-        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
+        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "")
     elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
-        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
+        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "")
     elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
-        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
+        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "")
     else ()
         message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
     endif ()
@@ -17,15 +17,12 @@ set (SRCS
     sleep.cpp
     terminalColors.cpp
     errnoToString.cpp
+    ReplxxLineReader.cpp
     StringRef.cpp
     safeExit.cpp
     throwError.cpp
 )

-if (ENABLE_REPLXX)
-    list (APPEND SRCS ReplxxLineReader.cpp)
-endif ()
-
 if (USE_DEBUG_HELPERS)
     get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES)
     # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc.
@@ -45,14 +45,16 @@ std::string replxx_now_ms_str()
     time_t t = ms.count() / 1000;
     tm broken;
     if (!localtime_r(&t, &broken))
-    {
-        return std::string();
-    }
+        return {};

     static int const BUFF_SIZE(32);
     char str[BUFF_SIZE];
-    strftime(str, BUFF_SIZE, "%Y-%m-%d %H:%M:%S.", &broken);
-    snprintf(str + sizeof("YYYY-mm-dd HH:MM:SS"), 5, "%03d", static_cast<int>(ms.count() % 1000));
+    if (strftime(str, BUFF_SIZE, "%Y-%m-%d %H:%M:%S.", &broken) <= 0)
+        return {};
+
+    if (snprintf(str + sizeof("YYYY-mm-dd HH:MM:SS"), 5, "%03d", static_cast<int>(ms.count() % 1000)) <= 0)
+        return {};

     return str;
 }
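The hunk above adds error checking to a previously unchecked strftime/snprintf pair. As a self-contained illustration of the resulting pattern (a sketch assuming POSIX localtime_r, not the verbatim ClickHouse source):

```cpp
#include <chrono>
#include <cstdio>
#include <ctime>
#include <string>

// Format the current time as "YYYY-mm-dd HH:MM:SS.mmm", returning an empty
// string on any failure instead of reading the result of a failed call.
std::string now_ms_str()
{
    using namespace std::chrono;
    auto ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch());
    time_t t = ms.count() / 1000;

    tm broken;
    if (!localtime_r(&t, &broken))
        return {};

    static int const BUFF_SIZE = 32;
    char str[BUFF_SIZE];
    if (strftime(str, BUFF_SIZE, "%Y-%m-%d %H:%M:%S.", &broken) <= 0)
        return {};

    // sizeof("YYYY-mm-dd HH:MM:SS") counts the terminating NUL, so the offset
    // lands exactly one past the '.' that strftime wrote above.
    if (snprintf(str + sizeof("YYYY-mm-dd HH:MM:SS"), 5, "%03d", static_cast<int>(ms.count() % 1000)) <= 0)
        return {};

    return str;
}
```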
@@ -9,7 +9,7 @@ std::string errnoToString(int code, int the_errno)
     char buf[buf_size];
 #ifndef _GNU_SOURCE
     int rc = strerror_r(the_errno, buf, buf_size);
-#ifdef __APPLE__
+#ifdef OS_DARWIN
     if (rc != 0 && rc != EINVAL)
 #else
     if (rc != 0)
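For context on this hunk: strerror_r exists in two incompatible variants selected by `_GNU_SOURCE`, and the XSI variant's failure handling differs on Darwin, which is what the branch being renamed to `OS_DARWIN` accounts for. A minimal sketch of the portable pattern (hypothetical `describe_errno` helper; the glibc/Darwin behaviors noted in comments are assumptions, not quoted from the source):

```cpp
#include <cerrno>
#include <cstring>
#include <string>

std::string describe_errno(int the_errno)
{
    constexpr size_t buf_size = 128;
    char buf[buf_size];
#ifdef _GNU_SOURCE
    // GNU variant: returns a char* that may point into buf or to a static message.
    return strerror_r(the_errno, buf, buf_size);
#else
    // XSI variant: returns 0 on success, an error code otherwise.
    int rc = strerror_r(the_errno, buf, buf_size);
#ifdef __APPLE__
    // On Darwin, EINVAL is still accompanied by a usable "Unknown error" message.
    if (rc != 0 && rc != EINVAL)
#else
    if (rc != 0)
#endif
        return "errno " + std::to_string(the_errno);
    return buf;
#endif
}
```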
@@ -16,7 +16,7 @@ uint64_t getAvailableMemoryAmountOrZero()
 {
 #if defined(_SC_PHYS_PAGES) // linux
     return getPageSize() * sysconf(_SC_PHYS_PAGES);
-#elif defined(__FreeBSD__)
+#elif defined(OS_FREEBSD)
     struct vmtotal vmt;
     size_t vmt_size = sizeof(vmt);
     if (sysctlbyname("vm.vmtotal", &vmt, &vmt_size, NULL, 0) == 0)
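The same compiler-macro-to-`OS_*` rename applies here; the function itself multiplies the page count by the page size to get physical memory. A sketch of the Linux branch only (hypothetical helper name; the BSD branch in the hunk above uses sysctlbyname instead):

```cpp
#include <unistd.h>
#include <cstdint>

// Total physical memory in bytes, or 0 if the sysconf keys are unavailable.
uint64_t physical_memory_or_zero()
{
#if defined(_SC_PHYS_PAGES) && defined(_SC_PAGESIZE)
    long pages = sysconf(_SC_PHYS_PAGES);
    long page_size = sysconf(_SC_PAGESIZE);
    if (pages > 0 && page_size > 0)
        return static_cast<uint64_t>(pages) * static_cast<uint64_t>(page_size);
#endif
    return 0;
}
```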
@@ -6,7 +6,7 @@

 #include <base/defines.h>

-#if defined(__linux__) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
+#if defined(OS_LINUX) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
 #define USE_PHDR_CACHE 1
 #endif
@@ -576,8 +576,8 @@ private:
         else if constexpr (Bits == 128 && sizeof(base_type) == 8)
         {
             using CompilerUInt128 = unsigned __int128;
-            CompilerUInt128 a = (CompilerUInt128(lhs.items[1]) << 64) + lhs.items[0];
-            CompilerUInt128 b = (CompilerUInt128(rhs.items[1]) << 64) + rhs.items[0];
+            CompilerUInt128 a = (CompilerUInt128(lhs.items[1]) << 64) + lhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
+            CompilerUInt128 b = (CompilerUInt128(rhs.items[1]) << 64) + rhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
             CompilerUInt128 c = a * b;
             integer<Bits, Signed> res;
             res.items[0] = c;
@@ -841,8 +841,8 @@ public:
         {
             using CompilerUInt128 = unsigned __int128;

-            CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0];
-            CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0];
+            CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
+            CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
             CompilerUInt128 c = a / b; // NOLINT

             integer<Bits, Signed> res;
@@ -1204,7 +1204,7 @@ constexpr integer<Bits, Signed>::operator T() const noexcept

     UnsignedT res{};
     for (unsigned i = 0; i < _impl::item_count && i < (sizeof(T) + sizeof(base_type) - 1) / sizeof(base_type); ++i)
-        res += UnsignedT(items[i]) << (sizeof(base_type) * 8 * i);
+        res += UnsignedT(items[i]) << (sizeof(base_type) * 8 * i); // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)

     return res;
 }
@@ -77,6 +77,7 @@ if (OS_LINUX AND NOT LINKER_NAME)

     if (NOT LINKER_NAME)
         if (GOLD_PATH)
+            message (WARNING "Linking with gold is not recommended. Please use lld.")
             if (COMPILER_GCC)
                 set (LINKER_NAME "gold")
             else ()
@@ -1,7 +1,4 @@
 set(ABSL_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp")
-if(NOT EXISTS "${ABSL_ROOT_DIR}/CMakeLists.txt")
-    message(FATAL_ERROR " submodule third_party/abseil-cpp is missing. To fix try run: \n git submodule update --init --recursive")
-endif()
 set(BUILD_TESTING OFF)
 set(ABSL_PROPAGATE_CXX_STD ON)
 add_subdirectory("${ABSL_ROOT_DIR}" "${ClickHouse_BINARY_DIR}/contrib/abseil-cpp")

@@ -5,6 +5,7 @@ if (NOT ENABLE_AMQPCPP)
     return()
 endif()

+# can be removed once libuv build on MacOS with GCC is possible
 if (NOT TARGET ch_contrib::uv)
     message(STATUS "Not using AMQP-CPP because libuv is disabled")
     return()
@@ -37,21 +38,6 @@ set (SRCS

 add_library(_amqp-cpp ${SRCS})

-target_compile_options (_amqp-cpp
-    PRIVATE
-        -Wno-old-style-cast
-        -Wno-inconsistent-missing-destructor-override
-        -Wno-deprecated
-        -Wno-unused-parameter
-        -Wno-shadow
-        -Wno-tautological-type-limit-compare
-        -Wno-extra-semi
-        # NOTE: disable all warnings at last because the warning:
-        # "conversion function converting 'XXX' to itself will never be used"
-        # doesn't have it's own diagnostic flag yet.
-        -w
-)
-
 target_include_directories (_amqp-cpp SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include" "${LIBRARY_DIR}")
 target_link_libraries (_amqp-cpp PUBLIC OpenSSL::Crypto OpenSSL::SSL ch_contrib::uv)
 add_library (ch_contrib::amqp_cpp ALIAS _amqp-cpp)
contrib/arrow (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit 6f274b737c66a6c39bab0d3bdf6cf7d139ef06f5
+Subproject commit efdcd015cfdee1b6aa349c9ca227ca12c3d697f5

@@ -20,7 +20,7 @@ endif()
 option (ENABLE_PARQUET "Enable parquet" ${ENABLE_PARQUET_DEFAULT})

 if (NOT ENABLE_PARQUET)
-    message(STATUS "Building without Parquet support")
+    message(STATUS "Not using parquet")
     return()
 endif()

@@ -60,14 +60,6 @@ target_compile_definitions (_avrocpp PUBLIC SNAPPY_CODEC_AVAILABLE)
 target_include_directories (_avrocpp PRIVATE ${SNAPPY_INCLUDE_DIR})
 target_link_libraries (_avrocpp PRIVATE ch_contrib::snappy)

-if (COMPILER_GCC)
-    set (SUPPRESS_WARNINGS -Wno-non-virtual-dtor)
-elseif (COMPILER_CLANG)
-    set (SUPPRESS_WARNINGS -Wno-non-virtual-dtor)
-endif ()
-
-target_compile_options(_avrocpp PRIVATE ${SUPPRESS_WARNINGS})
-
 # create a symlink to include headers with <avro/...>
 set(AVRO_INCLUDE_DIR "${CMAKE_CURRENT_BINARY_DIR}/include")
 ADD_CUSTOM_TARGET(avro_symlink_headers ALL
@@ -52,20 +52,6 @@ include("${AZURE_DIR}/cmake-modules/AzureTransportAdapters.cmake")

 add_library(_azure_sdk ${AZURE_SDK_UNIFIED_SRC})

-if (COMPILER_CLANG)
-    target_compile_options(_azure_sdk PRIVATE
-        -Wno-deprecated-copy-dtor
-        -Wno-extra-semi
-        -Wno-suggest-destructor-override
-        -Wno-inconsistent-missing-destructor-override
-        -Wno-error=unknown-warning-option
-    )
-
-    if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13)
-        target_compile_options(_azure_sdk PRIVATE -Wno-reserved-identifier)
-    endif()
-endif()
-
 # Originally, on Windows azure-core is built with bcrypt and crypt32 by default
 if (TARGET OpenSSL::SSL)
     target_link_libraries(_azure_sdk PRIVATE OpenSSL::Crypto OpenSSL::SSL)

@@ -1,7 +1,12 @@
 # Needed for:
 # - securely connecting to an external server, e.g. clickhouse-client --host ... --secure
 # - lots of thirdparty libraries
-option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES})
+
+# Actually, so many 3rd party libraries + unit tests need SSL that we cannot disable it
+# without breaking the build ...
+option(ENABLE_SSL "Enable ssl" ON) # breaks if OFF
+# TODO: Making SSL dependent on ENABLE_LIBRARIES is desirable but needs fixing dependent libs + tests.
+# option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES})

 if(NOT ENABLE_SSL)
     message(STATUS "Not using openssl")
@@ -700,3 +705,109 @@ target_compile_options(_crypto PRIVATE -Wno-gnu-anonymous-struct)

 add_library(OpenSSL::Crypto ALIAS _crypto)
 add_library(OpenSSL::SSL ALIAS _ssl)
+
+# Helper function used in the populate_openssl_vars function below
+function(from_hex HEX DEC)
+    string(TOUPPER "${HEX}" HEX)
+    set(_res 0)
+    string(LENGTH "${HEX}" _strlen)
+
+    while (_strlen GREATER 0)
+        math(EXPR _res "${_res} * 16")
+        string(SUBSTRING "${HEX}" 0 1 NIBBLE)
+        string(SUBSTRING "${HEX}" 1 -1 HEX)
+        if (NIBBLE STREQUAL "A")
+            math(EXPR _res "${_res} + 10")
+        elseif (NIBBLE STREQUAL "B")
+            math(EXPR _res "${_res} + 11")
+        elseif (NIBBLE STREQUAL "C")
+            math(EXPR _res "${_res} + 12")
+        elseif (NIBBLE STREQUAL "D")
+            math(EXPR _res "${_res} + 13")
+        elseif (NIBBLE STREQUAL "E")
+            math(EXPR _res "${_res} + 14")
+        elseif (NIBBLE STREQUAL "F")
+            math(EXPR _res "${_res} + 15")
+        else ()
+            math(EXPR _res "${_res} + ${NIBBLE}")
+        endif ()
+
+        string(LENGTH "${HEX}" _strlen)
+    endwhile ()
+
+    set(${DEC} ${_res} PARENT_SCOPE)
+endfunction()
+
+# ClickHouse uses BoringSSL which is a fork of OpenSSL.
+# This populates CMAKE var OPENSSL_VERSION from the OPENSSL_VERSION_NUMBER defined
+# in contrib/boringssl/include/openssl/base.h. It also sets the CMAKE var OPENSSL_IS_BORING_SSL
+# if it's defined in the file. Both OPENSSL_VERSION and OPENSSL_IS_BORING_SSL variables will be
+# used to populate flags in the `system.build_options` table for more context on ssl version used.
+# This cmake script is adopted from FindOpenSSL cmake module and slightly modified for this use-case .
+if (EXISTS "${BORINGSSL_SOURCE_DIR}/include/openssl/base.h")
+    file(STRINGS "${BORINGSSL_SOURCE_DIR}/include/openssl/base.h" openssl_version_str
+         REGEX "^#[\t ]*define[\t ]+OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])+.*")
+
+    file(STRINGS "${BORINGSSL_SOURCE_DIR}/include/openssl/base.h" openssl_is_boringssl
+         REGEX "^#[\t ]*define[\t ]+OPENSSL_IS_BORINGSSL.*")
+
+    # Set to true if OPENSSL_IS_BORING_SSL is defined
+    if (openssl_is_boringssl)
+        set(OPENSSL_IS_BORING_SSL 1)
+    endif ()
+
+    # If openssl_version_str is defined extrapolate and set OPENSSL_VERSION
+    if (openssl_version_str)
+        # The version number is encoded as 0xMNNFFPPS: major minor fix patch status
+        # The status gives if this is a developer or prerelease and is ignored here.
+        # Major, minor, and fix directly translate into the version numbers shown in
+        # the string. The patch field translates to the single character suffix that
+        # indicates the bug fix state, which 00 -> nothing, 01 -> a, 02 -> b and so
+        # on.
+
+        string(REGEX REPLACE "^.*OPENSSL_VERSION_NUMBER[\t ]+0x([0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F][0-9a-fA-F])([0-9a-fA-F]).*$"
+               "\\1;\\2;\\3;\\4;\\5" OPENSSL_VERSION_LIST "${openssl_version_str}")
+        list(GET OPENSSL_VERSION_LIST 0 OPENSSL_VERSION_MAJOR)
+        list(GET OPENSSL_VERSION_LIST 1 OPENSSL_VERSION_MINOR)
+        from_hex("${OPENSSL_VERSION_MINOR}" OPENSSL_VERSION_MINOR)
+        list(GET OPENSSL_VERSION_LIST 2 OPENSSL_VERSION_FIX)
+        from_hex("${OPENSSL_VERSION_FIX}" OPENSSL_VERSION_FIX)
+        list(GET OPENSSL_VERSION_LIST 3 OPENSSL_VERSION_PATCH)
+
+        if (NOT OPENSSL_VERSION_PATCH STREQUAL "00")
+            from_hex("${OPENSSL_VERSION_PATCH}" _tmp)
+            # 96 is the ASCII code of 'a' minus 1
+            math(EXPR OPENSSL_VERSION_PATCH_ASCII "${_tmp} + 96")
+            unset(_tmp)
+            # Once anyone knows how OpenSSL would call the patch versions beyond 'z'
+            # this should be updated to handle that, too. This has not happened yet
+            # so it is simply ignored here for now.
+            string(ASCII "${OPENSSL_VERSION_PATCH_ASCII}" OPENSSL_VERSION_PATCH_STRING)
+        endif ()
+
+        set(OPENSSL_VERSION "${OPENSSL_VERSION_MAJOR}.${OPENSSL_VERSION_MINOR}.${OPENSSL_VERSION_FIX}${OPENSSL_VERSION_PATCH_STRING}")
+    else ()
+        # Since OpenSSL 3.0.0, the new version format is MAJOR.MINOR.PATCH and
+        # a new OPENSSL_VERSION_STR macro contains exactly that
+        file(STRINGS "${BORINGSSL_SOURCE_DIR}/include/openssl/base.h" OPENSSL_VERSION_STR
+             REGEX "^#[\t ]*define[\t ]+OPENSSL_VERSION_STR[\t ]+\"([0-9])+\\.([0-9])+\\.([0-9])+\".*")
+        string(REGEX REPLACE "^.*OPENSSL_VERSION_STR[\t ]+\"([0-9]+\\.[0-9]+\\.[0-9]+)\".*$"
+               "\\1" OPENSSL_VERSION_STR "${OPENSSL_VERSION_STR}")
+
+        set(OPENSSL_VERSION "${OPENSSL_VERSION_STR}")
+
+        # Setting OPENSSL_VERSION_MAJOR OPENSSL_VERSION_MINOR and OPENSSL_VERSION_FIX
+        string(REGEX MATCHALL "([0-9])+" OPENSSL_VERSION_NUMBER "${OPENSSL_VERSION}")
+        list(POP_FRONT OPENSSL_VERSION_NUMBER
+            OPENSSL_VERSION_MAJOR
+            OPENSSL_VERSION_MINOR
+            OPENSSL_VERSION_FIX)
+
+        unset(OPENSSL_VERSION_NUMBER)
+        unset(OPENSSL_VERSION_STR)
+    endif ()
+endif ()
+
+# Set CMAKE variables so that they can be referenced properly from everywhere
+set(OPENSSL_VERSION "${OPENSSL_VERSION}" CACHE INTERNAL "")
+set(OPENSSL_IS_BORING_SSL "${OPENSSL_IS_BORING_SSL}" CACHE INTERNAL 0)
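The CMake added above decodes OPENSSL_VERSION_NUMBER's 0xMNNFFPPS layout with string arithmetic because CMake has no hex literals; in C++ the same decoding is a few shifts and masks. A sketch assuming the pre-3.0 encoding described in the comment (illustrative only, not part of the build):

```cpp
#include <cstdio>

// Decode the pre-3.0 OpenSSL encoding 0xMNNFFPPS:
// major nibble, minor byte, fix byte, patch byte (00 -> none, 01 -> 'a', ...),
// and a status nibble that is ignored here, as in the CMake above.
void print_openssl_version(unsigned long v)
{
    unsigned major = (v >> 28) & 0xF;
    unsigned minor = (v >> 20) & 0xFF;
    unsigned fix   = (v >> 12) & 0xFF;
    unsigned patch = (v >> 4)  & 0xFF;
    if (patch)
        printf("%u.%u.%u%c\n", major, minor, fix, 'a' + patch - 1);
    else
        printf("%u.%u.%u\n", major, minor, fix);
}

int main()
{
    print_openssl_version(0x1010107fUL); // prints "1.1.1g"
}
```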
contrib/brotli (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit 5bd78768449751a78d4b4c646b0612917986f5b1
+Subproject commit 63be8a99401992075c23e99f7c84de1c653e39e2

@@ -45,7 +45,4 @@ add_library(ch_contrib::brotli ALIAS _brotli)

 target_include_directories(_brotli SYSTEM BEFORE PUBLIC "${BROTLI_SOURCE_DIR}/include")

-if(M_LIBRARY)
-    target_link_libraries(_brotli PRIVATE ${M_LIBRARY})
-endif()
 target_compile_definitions(_brotli PRIVATE BROTLI_BUILD_PORTABLE=1)

@@ -1,6 +1,6 @@
 option(ENABLE_BZIP2 "Enable bzip2 compression support" ${ENABLE_LIBRARIES})
 if (NOT ENABLE_BZIP2)
-    message (STATUS "bzip2 compression disabled")
+    message (STATUS "Not using bzip2")
     return()
 endif()

@@ -26,8 +26,4 @@ configure_file (

 add_library(_bzip2 ${SRCS})
 add_library(ch_contrib::bzip2 ALIAS _bzip2)
-# To avoid -Wreserved-id-macro we use SYSTEM:
-#
-# clickhouse/contrib/bzip2/bzlib.h:23:9: error: macro name is a reserved identifier [-Werror,-Wreserved-id-macro]
-# #define _BZLIB_H
 target_include_directories(_bzip2 SYSTEM BEFORE PUBLIC "${BZIP2_SOURCE_DIR}" "${BZIP2_BINARY_DIR}")
@@ -81,16 +81,12 @@ set (CAPNPC_SRCS
 add_library(_capnpc ${CAPNPC_SRCS})
 target_link_libraries(_capnpc PUBLIC _capnp)

-# The library has substandard code
-if (COMPILER_GCC)
-    set (SUPPRESS_WARNINGS -w)
-elseif (COMPILER_CLANG)
-    set (SUPPRESS_WARNINGS -w)
+if (COMPILER_CLANG)
     set (CAPNP_PRIVATE_CXX_FLAGS -fno-char8_t)
 endif ()

-target_compile_options(_kj PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
-target_compile_options(_capnp PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
-target_compile_options(_capnpc PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
+target_compile_options(_kj PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
+target_compile_options(_capnp PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
+target_compile_options(_capnpc PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})

 add_library(ch_contrib::capnp ALIAS _capnpc)

@@ -5,6 +5,7 @@ if (NOT ENABLE_CASSANDRA)
     return()
 endif()

+# can be removed once libuv build on MacOS with GCC is possible
 if (NOT TARGET ch_contrib::uv)
     message(STATUS "Not using cassandra because libuv is disabled")
     return()
contrib/cppkafka (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit 64bd67db12b9c705e9127439a5b05b351d9df7da
+Subproject commit 5a119f689f8a4d90d10a9635e7ee2bee5c127de1

@@ -1,5 +1,5 @@
 if (NOT ENABLE_KAFKA)
-    message(STATUS "Not using librdkafka (skip cppkafka)")
+    message(STATUS "Not using kafka")
     return()
 endif()

@@ -5,7 +5,7 @@ elseif(ENABLE_FASTOPS)
 endif()

 if(NOT ENABLE_FASTOPS)
-    message(STATUS "Not using fast vectorized mathematical functions library by Mikhail Parakhin")
+    message(STATUS "Not using fastops")
     return()
 endif()
@@ -1,22 +1,24 @@
+set(FMT_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/fmtlib")
+
 set (SRCS
     # NOTE: do not build module for now:
     # ../fmtlib/src/fmt.cc
-    ../fmtlib/src/format.cc
-    ../fmtlib/src/os.cc
+    ${FMT_SOURCE_DIR}/src/format.cc
+    ${FMT_SOURCE_DIR}/src/os.cc

-    ../fmtlib/include/fmt/args.h
-    ../fmtlib/include/fmt/chrono.h
-    ../fmtlib/include/fmt/color.h
-    ../fmtlib/include/fmt/compile.h
-    ../fmtlib/include/fmt/core.h
-    ../fmtlib/include/fmt/format.h
-    ../fmtlib/include/fmt/format-inl.h
-    ../fmtlib/include/fmt/locale.h
-    ../fmtlib/include/fmt/os.h
-    ../fmtlib/include/fmt/ostream.h
-    ../fmtlib/include/fmt/printf.h
-    ../fmtlib/include/fmt/ranges.h
-    ../fmtlib/include/fmt/xchar.h
+    ${FMT_SOURCE_DIR}/include/fmt/args.h
+    ${FMT_SOURCE_DIR}/include/fmt/chrono.h
+    ${FMT_SOURCE_DIR}/include/fmt/color.h
+    ${FMT_SOURCE_DIR}/include/fmt/compile.h
+    ${FMT_SOURCE_DIR}/include/fmt/core.h
+    ${FMT_SOURCE_DIR}/include/fmt/format.h
+    ${FMT_SOURCE_DIR}/include/fmt/format-inl.h
+    ${FMT_SOURCE_DIR}/include/fmt/locale.h
+    ${FMT_SOURCE_DIR}/include/fmt/os.h
+    ${FMT_SOURCE_DIR}/include/fmt/ostream.h
+    ${FMT_SOURCE_DIR}/include/fmt/printf.h
+    ${FMT_SOURCE_DIR}/include/fmt/ranges.h
+    ${FMT_SOURCE_DIR}/include/fmt/xchar.h
 )

 add_library(_fmt ${SRCS})

@@ -34,8 +34,5 @@ add_library(_h3 ${SRCS})
 target_include_directories(_h3 SYSTEM PUBLIC "${H3_SOURCE_DIR}/include")
 target_include_directories(_h3 SYSTEM PUBLIC "${H3_BINARY_DIR}/include")
 target_compile_definitions(_h3 PRIVATE H3_HAVE_VLA)
-if(M_LIBRARY)
-    target_link_libraries(_h3 PRIVATE ${M_LIBRARY})
-endif()

 add_library(ch_contrib::h3 ALIAS _h3)

@@ -5,7 +5,7 @@ elseif(ENABLE_HIVE)
 endif()

 if (NOT ENABLE_HIVE)
-    message("Hive disabled")
+    message(STATUS "Not using hive")
     return()
 endif()
@@ -481,10 +481,6 @@ target_include_directories(_icui18n SYSTEM PUBLIC "${ICU_SOURCE_DIR}/i18n/")
 target_compile_definitions(_icuuc PRIVATE -DU_COMMON_IMPLEMENTATION)
 target_compile_definitions(_icui18n PRIVATE -DU_I18N_IMPLEMENTATION)

-if (COMPILER_CLANG)
-    target_compile_options(_icudata PRIVATE -Wno-unused-command-line-argument)
-endif ()
-
 add_library(_icu INTERFACE)
 target_link_libraries(_icu INTERFACE _icui18n _icuuc _icudata)
 add_library(ch_contrib::icu ALIAS _icu)

@@ -180,7 +180,6 @@ if (USE_UNWIND)
     target_link_libraries (_jemalloc PRIVATE unwind)
 endif ()

-target_compile_options(_jemalloc PRIVATE -Wno-redundant-decls)
 # for RTLD_NEXT
 target_compile_options(_jemalloc PRIVATE -D_GNU_SOURCE)

@@ -6,7 +6,7 @@ elseif(ENABLE_CPUID)
 endif()

 if (NOT ENABLE_CPUID)
-    message("Not using cpuid")
+    message(STATUS "Not using cpuid")
     return()
 endif()
@@ -27,8 +27,5 @@ add_library (_cpuid ${SRCS})

 target_include_directories (_cpuid SYSTEM PUBLIC "${LIBRARY_DIR}")
 target_compile_definitions (_cpuid PRIVATE VERSION="v0.4.1")
-if (COMPILER_CLANG)
-    target_compile_options (_cpuid PRIVATE -Wno-reserved-id-macro)
-endif ()

 add_library(ch_contrib::cpuid ALIAS _cpuid)

@@ -1,7 +1,7 @@
 option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES})

 if (NOT ENABLE_GSASL_LIBRARY)
-    message(STATUS "Not using gsasl library")
+    message(STATUS "Not using gsasl")
     return()
 endif()

@@ -1,3 +1,4 @@
+# once fixed, please remove similar places in CMakeLists of libuv users (search "ch_contrib::uv")
 if (OS_DARWIN AND COMPILER_GCC)
     message (WARNING "libuv cannot be built with GCC in macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082")
     return()
contrib/libxml2 (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit a075d256fd9ff15590b86d981b75a50ead124fca
+Subproject commit 7846b0a677f8d3ce72486125fa281e92ac9970e8

@@ -53,9 +53,6 @@ set(SRCS
 add_library(_libxml2 ${SRCS})

 target_link_libraries(_libxml2 PRIVATE ch_contrib::zlib)
-if(M_LIBRARY)
-    target_link_libraries(_libxml2 PRIVATE ${M_LIBRARY})
-endif()

 target_include_directories(_libxml2 BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/linux_x86_64/include")
 target_include_directories(_libxml2 BEFORE PUBLIC "${LIBXML2_SOURCE_DIR}/include")

@@ -76,9 +76,7 @@ message (STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}")
 message (STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}")

 # ld: unknown option: --color-diagnostics
-if (APPLE)
     set (LINKER_SUPPORTS_COLOR_DIAGNOSTICS 0 CACHE INTERNAL "")
-endif ()

 # Do not adjust RPATH in llvm, since then it will not be able to find libcxx/libcxxabi/libunwind
 set (CMAKE_INSTALL_RPATH "ON")
@@ -1,6 +1,6 @@
 option(ENABLE_MINIZIP "Enable minizip-ng the zip manipulation library" ${ENABLE_LIBRARIES})
 if (NOT ENABLE_MINIZIP)
-    message (STATUS "minizip-ng disabled")
+    message (STATUS "Not using minizip-ng")
     return()
 endif()

contrib/msgpack-c (vendored, 2 lines changed)

@@ -1 +1 @@
-Subproject commit 790b3fe58ebded7a8bd130782ef28bec5784c248
+Subproject commit 46684265d50b5d1b062d4c5c428ba08462844b1d

@@ -2,12 +2,12 @@ if (NOT ENABLE_ODBC)
     return ()
 endif ()

-set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/nanodbc")
-
 if (NOT TARGET ch_contrib::unixodbc)
     message(FATAL_ERROR "Configuration error: unixodbc is not a target")
 endif()

+set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/nanodbc")
+
 set (SRCS
     "${LIBRARY_DIR}/nanodbc/nanodbc.cpp"
 )
2 contrib/rapidjson vendored
@ -1 +1 @@
Subproject commit b571bd5c1a3b1fc931d77ae36932537a3c9018c3
Subproject commit c4ef90ccdbc21d5d5a628d08316bfd301e32d6fa

@ -1,10 +1,3 @@
option (ENABLE_REPLXX "Enable replxx support" ${ENABLE_LIBRARIES})

if (NOT ENABLE_REPLXX)
    message (STATUS "Not using replxx")
    return()
endif()

set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx")

set(SRCS

@ -22,9 +15,4 @@ set(SRCS

add_library (_replxx ${SRCS})
target_include_directories(_replxx SYSTEM PUBLIC "${LIBRARY_DIR}/include")

if (COMPILER_CLANG)
    target_compile_options(_replxx PRIVATE -Wno-documentation)
endif ()

add_library(ch_contrib::replxx ALIAS _replxx)

@ -149,7 +149,3 @@ target_link_libraries(_s2 PRIVATE

target_include_directories(_s2 SYSTEM BEFORE PUBLIC "${S2_SOURCE_DIR}/")
target_include_directories(_s2 SYSTEM PUBLIC "${ABSL_SOURCE_DIR}")

if(M_LIBRARY)
    target_link_libraries(_s2 PRIVATE ${M_LIBRARY})
endif()
2 contrib/snappy vendored
@ -1 +1 @@
Subproject commit 3786173af204d21da97180977ad6ab4321138b3d
Subproject commit fb057edfed820212076239fd32cb2ff23e9016bf
@ -1,7 +1,7 @@
option(ENABLE_THRIFT "Enable Thrift" ${ENABLE_LIBRARIES})

if (NOT ENABLE_THRIFT)
    message (STATUS "thrift disabled")
    message (STATUS "Not using thrift")
    return()
endif()

@ -294,14 +294,6 @@ target_include_directories (_unixodbc
    "${LIBRARY_DIR}/include"
)
target_compile_definitions (_unixodbc PRIVATE -DHAVE_CONFIG_H)
target_compile_options (_unixodbc
    PRIVATE
        -Wno-dangling-else
        -Wno-parentheses
        -Wno-misleading-indentation
        -Wno-unknown-warning-option
        -Wno-reserved-id-macro
        -O2
)
target_compile_options (_unixodbc PRIVATE -O2) # intended?

add_library (ch_contrib::unixodbc ALIAS _unixodbc)
@ -1,15 +1,16 @@
# rebuild in #36968
# docker build -t clickhouse/docs-builder .
# nodejs 17 prefers ipv6 and is broken in our environment
FROM node:16.14.2-alpine3.15
FROM node:16-alpine

RUN apk add --no-cache git openssh bash

# TODO: clean before merge!
ARG DOCS_BRANCH=main
# At this point we want to really update /opt/clickhouse-docs
# despite the cached images
ARG CACHE_INVALIDATOR=0

RUN git clone https://github.com/ClickHouse/clickhouse-docs.git \
    --depth=1 --branch=${DOCS_BRANCH} /opt/clickhouse-docs
    --depth=1 --branch=main /opt/clickhouse-docs

WORKDIR /opt/clickhouse-docs
@ -42,6 +42,7 @@ DATA_DIR="${CLICKHOUSE_DATA_DIR:-/var/lib/clickhouse}"
LOG_DIR="${LOG_DIR:-/var/log/clickhouse-keeper}"
LOG_PATH="${LOG_DIR}/clickhouse-keeper.log"
ERROR_LOG_PATH="${LOG_DIR}/clickhouse-keeper.err.log"
COORDINATION_DIR="${DATA_DIR}/coordination"
COORDINATION_LOG_DIR="${DATA_DIR}/coordination/log"
COORDINATION_SNAPSHOT_DIR="${DATA_DIR}/coordination/snapshots"
CLICKHOUSE_WATCHDOG_ENABLE=${CLICKHOUSE_WATCHDOG_ENABLE:-0}

@ -49,6 +50,7 @@ CLICKHOUSE_WATCHDOG_ENABLE=${CLICKHOUSE_WATCHDOG_ENABLE:-0}
for dir in "$DATA_DIR" \
    "$LOG_DIR" \
    "$TMP_DIR" \
    "$COORDINATION_DIR" \
    "$COORDINATION_LOG_DIR" \
    "$COORDINATION_SNAPSHOT_DIR"
do
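The two hunks above only extend the variable list and the directory loop head; the loop body is outside the hunk. A minimal sketch of what such an entrypoint loop typically does with each directory — the `mkdir`/`chown` details are assumptions, not shown in this diff:

```bash
# Hypothetical loop body for the entrypoint above: make sure every
# directory exists and is writable by the service user.
for dir in "$DATA_DIR" "$LOG_DIR" "$TMP_DIR" \
           "$COORDINATION_DIR" "$COORDINATION_LOG_DIR" "$COORDINATION_SNAPSHOT_DIR"
do
    mkdir -p "$dir"                 # create the directory if it is missing
    chown -R clickhouse: "$dir"     # assumed owner; the image's actual user may differ
done
```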
@ -6,7 +6,7 @@ FROM ubuntu:20.04
ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=14

RUN apt-get update \
    && apt-get install \
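Bumping `LLVM_VERSION` from 13 to 14 only takes effect because later layers expand the variable when installing the toolchain; those install lines fall outside this hunk, so the following is an assumed sketch of the usual pattern, not the Dockerfile's actual text:

```bash
# Assumed downstream usage of the LLVM_VERSION pin above (not part of this hunk).
LLVM_VERSION=14
apt-get update
apt-get install --yes \
    "clang-${LLVM_VERSION}" \
    "clang-tidy-${LLVM_VERSION}" \
    "lld-${LLVM_VERSION}"
```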
@ -319,28 +319,19 @@ if __name__ == "__main__":
    )
    parser.add_argument("--output-dir", type=dir_name, required=True)
    parser.add_argument("--build-type", choices=("debug", ""), default="")

    parser.add_argument(
        "--compiler",
        choices=(
            "clang-11",
            "clang-11-darwin",
            "clang-11-darwin-aarch64",
            "clang-11-aarch64",
            "clang-12",
            "clang-12-darwin",
            "clang-12-darwin-aarch64",
            "clang-12-aarch64",
            "clang-13",
            "clang-13-darwin",
            "clang-13-darwin-aarch64",
            "clang-13-aarch64",
            "clang-13-ppc64le",
            "clang-11-freebsd",
            "clang-12-freebsd",
            "clang-13-freebsd",
            "clang-14",
            "clang-14-darwin",
            "clang-14-darwin-aarch64",
            "clang-14-aarch64",
            "clang-14-ppc64le",
            "clang-14-freebsd",
            "gcc-11",
        ),
        default="clang-13",
        default="clang-14",
        help="a compiler to use",
    )
    parser.add_argument(

@ -348,6 +339,7 @@ if __name__ == "__main__":
        choices=("address", "thread", "memory", "undefined", ""),
        default="",
    )

    parser.add_argument("--split-binary", action="store_true")
    parser.add_argument("--clang-tidy", action="store_true")
    parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
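With the `clang-11`/`clang-12`/`clang-13` choices dropped and the default moved to `clang-14`, a local run of the packager has to name one of the remaining compilers. A hypothetical invocation, built only from the flags visible in this hunk (the output path is a placeholder):

```bash
# Hypothetical local packager run; ./artifacts is a placeholder output path.
python3 docker/packager/packager \
    --output-dir ./artifacts \
    --compiler clang-14 \
    --build-type debug \
    --sanitizer address \
    --cache ccache
```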
@ -21,7 +21,9 @@ By default, starting above server instance will be run as default user without p

### connect to it from a native client
```bash
$ docker run -it --rm --link some-clickhouse-server:clickhouse-server clickhouse/clickhouse-client --host clickhouse-server
$ docker run -it --rm --link some-clickhouse-server:clickhouse-server --entrypoint clickhouse-client clickhouse/clickhouse-server --host clickhouse-server
# OR
$ docker exec -it some-clickhouse-server clickhouse-client
```

More information about [ClickHouse client](https://clickhouse.com/docs/en/interfaces/cli/).
@ -7,7 +7,7 @@ FROM clickhouse/test-util:$FROM_TAG
ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=14

RUN apt-get update \
    && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@ -8,14 +8,18 @@ FROM clickhouse/binary-builder:$FROM_TAG
ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-13 libllvm13 libclang-13-dev libmlir-13-dev
RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-14 libllvm14 libclang-14-dev libmlir-14-dev

# repo versions doesn't work correctly with C++17
# also we push reports to s3, so we add index.html to subfolder urls
# https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser

RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-13 -DCMAKE_C_COMPILER=clang-13 && make -j
# TODO: remove branch in a few weeks after merge, e.g. in May or June 2022
RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser --branch llvm-14 \
    && cd woboq_codebrowser \
    && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-14 -DCMAKE_C_COMPILER=clang-14 \
    && make -j \
    && cd .. \
    && rm -rf woboq_codebrowser

ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator

@ -28,7 +32,7 @@ ENV SHA=nosha
ENV DATA="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/data"

CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-13 -DCMAKE_C_COMPILER=/usr/bin/clang-13 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_S3=0 && \
    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-14 -DCMAKE_C_COMPILER=/usr/bin/clang-14 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_S3=0 && \
    mkdir -p $HTML_RESULT_DIRECTORY && \
    $CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA | ts '%Y-%m-%d %H:%M:%S' && \
    cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\
@ -7,7 +7,7 @@ FROM clickhouse/test-util:$FROM_TAG
ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=14

RUN apt-get update \
    && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@ -12,7 +12,7 @@ stage=${stage:-}
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
echo "$script_dir"
repo_dir=ch
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-13_debug_none_bundled_unsplitted_disable_False_binary"}
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-14_debug_none_bundled_unsplitted_disable_False_binary"}
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}

function clone
@ -2,7 +2,7 @@
set -euo pipefail

CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-13_relwithdebuginfo_none_bundled_unsplitted_disable_False_binary/clickhouse"}
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-14_relwithdebuginfo_none_bundled_unsplitted_disable_False_binary/clickhouse"}
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
@ -207,6 +207,13 @@ function run_tests
        test_files=($(ls "$test_prefix"/*.xml))
    fi

    # We can filter out certain tests
    if [ -v CHPC_TEST_GREP_EXCLUDE ]; then
        # filter tests array in bash https://stackoverflow.com/a/40375567
        filtered_test_files=( $( for i in ${test_files[@]} ; do echo $i ; done | grep -v ${CHPC_TEST_GREP_EXCLUDE} ) )
        test_files=("${filtered_test_files[@]}")
    fi

    # We split perf tests into multiple checks to make them faster
    if [ -v CHPC_TEST_RUN_BY_HASH_TOTAL ]; then
        # filter tests array in bash https://stackoverflow.com/a/40375567
@ -7,23 +7,12 @@ RUN apt-get update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get install --yes --no-install-recommends \
        python3-requests \
        llvm-9
    && apt-get clean

COPY s3downloader /s3downloader

ENV S3_URL="https://clickhouse-datasets.s3.amazonaws.com"
ENV DATASETS="hits visits"
ENV EXPORT_S3_STORAGE_POLICIES=1

# Download Minio-related binaries
RUN arch=${TARGETARCH:-amd64} \
    && wget "https://dl.min.io/server/minio/release/linux-${arch}/minio" \
    && chmod +x ./minio \
    && wget "https://dl.min.io/client/mc/release/linux-${arch}/mc" \
    && chmod +x ./mc
ENV MINIO_ROOT_USER="clickhouse"
ENV MINIO_ROOT_PASSWORD="clickhouse"
COPY setup_minio.sh /

COPY run.sh /
CMD ["/bin/bash", "/run.sh"]
@ -17,7 +17,7 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
# install test configs
/usr/share/clickhouse-test/config/install.sh

./setup_minio.sh
./setup_minio.sh stateful

function start()
{
@ -1,66 +0,0 @@
#!/bin/bash

# TODO: Make this file shared with stateless tests
#
# Usage for local run:
#
# ./docker/test/stateful/setup_minio.sh ./tests/
#

set -e -x -a -u

ls -lha

mkdir -p ./minio_data

if [ ! -f ./minio ]; then
    echo 'MinIO binary not found, downloading...'

    BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]')

    wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-amd64/minio" \
        && chmod +x ./minio \
        && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-amd64/mc" \
        && chmod +x ./mc
fi

MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse}
MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse}

./minio server --address ":11111" ./minio_data &

while ! curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied
do
    echo "Trying to connect to minio"
    sleep 1
done

lsof -i :11111

sleep 5

./mc alias set clickminio http://localhost:11111 clickhouse clickhouse
./mc admin user add clickminio test testtest
./mc admin policy set clickminio readwrite user=test
./mc mb clickminio/test


# Upload data to Minio. By default after unpacking all tests will in
# /usr/share/clickhouse-test/queries

TEST_PATH=${1:-/usr/share/clickhouse-test}
MINIO_DATA_PATH=${TEST_PATH}/queries/1_stateful/data_minio

# Iterating over globs will cause redudant FILE variale to be a path to a file, not a filename
# shellcheck disable=SC2045
for FILE in $(ls "${MINIO_DATA_PATH}"); do
    echo "$FILE";
    ./mc cp "${MINIO_DATA_PATH}"/"$FILE" clickminio/test/"$FILE";
done

mkdir -p ~/.aws
cat <<EOT >> ~/.aws/credentials
[default]
aws_access_key_id=clickhouse
aws_secret_access_key=clickhouse
EOT
1 docker/test/stateful/setup_minio.sh Symbolic link
@ -0,0 +1 @@
../stateless/setup_minio.sh
@ -5,34 +5,37 @@ FROM clickhouse/test-base:$FROM_TAG

ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"

# golang version 1.13 on Ubuntu 20 is enough for tests
RUN apt-get update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get install --yes --no-install-recommends \
        awscli \
        brotli \
        expect \
        zstd \
        golang \
        lsof \
        mysql-client=8.0* \
        ncdu \
        netcat-openbsd \
        openjdk-11-jre-headless \
        openssl \
        postgresql-client \
        protobuf-compiler \
        python3 \
        python3-lxml \
        python3-pip \
        python3-requests \
        python3-termcolor \
        python3-pip \
        qemu-user-static \
        sqlite3 \
        sudo \
        # golang version 1.13 on Ubuntu 20 is enough for tests
        golang \
        telnet \
        tree \
        unixodbc \
        wget \
        mysql-client=8.0* \
        postgresql-client \
        sqlite3 \
        awscli
        zstd \
    && apt-get clean


RUN pip3 install numpy scipy pandas Jinja2

@ -49,14 +52,22 @@ RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
ENV NUM_TRIES=1
ENV MAX_RUN_TIME=0

# Unrelated to vars in setup_minio.sh, but should be the same there
# to have the same binaries for local running scenario
ARG MINIO_SERVER_VERSION=2022-01-03T18-22-58Z
ARG MINIO_CLIENT_VERSION=2022-01-05T23-52-51Z
ARG TARGETARCH

# Download Minio-related binaries
RUN arch=${TARGETARCH:-amd64} \
    && wget "https://dl.min.io/server/minio/release/linux-${arch}/minio" \
    && chmod +x ./minio \
    && wget "https://dl.min.io/client/mc/release/linux-${arch}/mc" \
    && chmod +x ./mc
    && wget "https://dl.min.io/server/minio/release/linux-${arch}/archive/minio.RELEASE.${MINIO_SERVER_VERSION}" -O ./minio \
    && wget "https://dl.min.io/client/mc/release/linux-${arch}/archive/mc.RELEASE.${MINIO_CLIENT_VERSION}" -O ./mc \
    && chmod +x ./mc ./minio


RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
    && tar -xvf hadoop-3.3.1.tar.gz \
    && rm -rf hadoop-3.3.1.tar.gz

ENV MINIO_ROOT_USER="clickhouse"
ENV MINIO_ROOT_PASSWORD="clickhouse"

@ -64,4 +75,5 @@ ENV EXPORT_S3_STORAGE_POLICIES=1

COPY run.sh /
COPY setup_minio.sh /
COPY setup_hdfs_minicluster.sh /
CMD ["/bin/bash", "/run.sh"]
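Switching to the `archive/minio.RELEASE.<version>` URLs pins the image layer to exact binaries instead of whatever `latest` happens to be. If reproducibility ever needs to be stronger than a version pin, the same `RUN` could also verify digests — a hedged sketch with a placeholder checksum:

```bash
# Hypothetical hardening of the pinned download above (digest is a placeholder).
MINIO_SERVER_VERSION=2022-01-03T18-22-58Z
arch=amd64
wget "https://dl.min.io/server/minio/release/linux-${arch}/archive/minio.RELEASE.${MINIO_SERVER_VERSION}" -O ./minio
echo "<expected-sha256>  ./minio" | sha256sum --check   # fails the build on mismatch
chmod +x ./minio
```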
@ -18,7 +18,8 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
# install test configs
/usr/share/clickhouse-test/config/install.sh

./setup_minio.sh
./setup_minio.sh stateless
./setup_hdfs_minicluster.sh

# For flaky check we also enable thread fuzzer
if [ "$NUM_TRIES" -gt "1" ]; then

@ -92,8 +93,6 @@ function run_tests()

    if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
        ADDITIONAL_OPTIONS+=('--replicated-database')
        # Cannot be used with replicated database, due to distributed_ddl_output_mode=none
        ADDITIONAL_OPTIONS+=('--no-left-queries-check')
        ADDITIONAL_OPTIONS+=('--jobs')
        ADDITIONAL_OPTIONS+=('2')
    else
20 docker/test/stateless/setup_hdfs_minicluster.sh Executable file
@ -0,0 +1,20 @@
#!/bin/bash

set -e -x -a -u

ls -lha

cd hadoop-3.3.1

export JAVA_HOME=/usr
mkdir -p target/test/data
chown clickhouse ./target/test/data
sudo -E -u clickhouse bin/mapred minicluster -format -nomr -nnport 12222 &

while ! nc -z localhost 12222; do
    sleep 1
done

lsof -i :12222

sleep 5
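The `nc -z` loop above spins forever if the minicluster never binds port 12222. A bounded variant, should the scripts ever want a timeout (a hypothetical helper, not in the repo):

```bash
# Hypothetical bounded wait: give up after N one-second retries.
wait_for_port() {
    local port=$1 retries=${2:-120}
    while ! nc -z localhost "$port"; do
        retries=$((retries - 1))
        if [ "$retries" -le 0 ]; then
            echo "port $port never opened" >&2
            return 1
        fi
        sleep 1
    done
}
wait_for_port 12222
```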
@ -1,36 +1,59 @@
#!/bin/bash

# Usage for local run:
#
# ./docker/test/stateless/setup_minio.sh ./tests/
#
USAGE='Usage for local run:

    ./docker/test/stateless/setup_minio.sh { stateful | stateless } ./tests/

'

set -e -x -a -u

TEST_TYPE="$1"
shift

case $TEST_TYPE in
    stateless) QUERY_DIR=0_stateless ;;
    stateful) QUERY_DIR=1_stateful ;;
    *) echo "unknown test type $TEST_TYPE"; echo "${USAGE}"; exit 1 ;;
esac

ls -lha

mkdir -p ./minio_data

if [ ! -f ./minio ]; then
    MINIO_SERVER_VERSION=${MINIO_SERVER_VERSION:-2022-01-03T18-22-58Z}
    MINIO_CLIENT_VERSION=${MINIO_CLIENT_VERSION:-2022-01-05T23-52-51Z}
    case $(uname -m) in
        x86_64) BIN_ARCH=amd64 ;;
        aarch64) BIN_ARCH=arm64 ;;
        *) echo "unknown architecture $(uname -m)"; exit 1 ;;
    esac
    echo 'MinIO binary not found, downloading...'

    BINARY_TYPE=$(uname -s | tr '[:upper:]' '[:lower:]')

    wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-amd64/minio" \
        && chmod +x ./minio \
        && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-amd64/mc" \
        && chmod +x ./mc
    wget "https://dl.min.io/server/minio/release/${BINARY_TYPE}-${BIN_ARCH}/archive/minio.RELEASE.${MINIO_SERVER_VERSION}" -O ./minio \
        && wget "https://dl.min.io/client/mc/release/${BINARY_TYPE}-${BIN_ARCH}/archive/mc.RELEASE.${MINIO_CLIENT_VERSION}" -O ./mc \
        && chmod +x ./mc ./minio
fi

MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse}
MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse}

./minio --version
./minio server --address ":11111" ./minio_data &

i=0
while ! curl -v --silent http://localhost:11111 2>&1 | grep AccessDenied
do
    if [[ $i == 60 ]]; then
        echo "Failed to setup minio"
        exit 0
    fi
    echo "Trying to connect to minio"
    sleep 1
    i=$((i + 1))
done

lsof -i :11111
@ -41,14 +64,16 @@ sleep 5
./mc admin user add clickminio test testtest
./mc admin policy set clickminio readwrite user=test
./mc mb clickminio/test
if [ "$TEST_TYPE" = "stateless" ]; then
    ./mc policy set public clickminio/test
fi


# Upload data to Minio. By default after unpacking all tests will in
# /usr/share/clickhouse-test/queries

TEST_PATH=${1:-/usr/share/clickhouse-test}
MINIO_DATA_PATH=${TEST_PATH}/queries/0_stateless/data_minio
MINIO_DATA_PATH=${TEST_PATH}/queries/${QUERY_DIR}/data_minio

# Iterating over globs will cause redudant FILE variale to be a path to a file, not a filename
# shellcheck disable=SC2045

@ -60,6 +85,6 @@ done
mkdir -p ~/.aws
cat <<EOT >> ~/.aws/credentials
[default]
aws_access_key_id=clickhouse
aws_secret_access_key=clickhouse
aws_access_key_id=${MINIO_ROOT_USER}
aws_secret_access_key=${MINIO_ROOT_PASSWORD}
EOT
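Since `mc` is already configured with the `clickminio` alias and awscli now reads the generated credentials, a quick post-setup sanity check is possible (optional, not part of the script):

```bash
# Optional verification that the uploads and credentials actually work.
./mc ls clickminio/test
aws --endpoint-url http://localhost:11111 s3 ls s3://test/   # same data via awscli
```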
@ -25,7 +25,6 @@ RUN apt-get update -y \
        brotli

COPY ./stress /stress
COPY ./download_previous_release /download_previous_release
COPY run.sh /

ENV DATASETS="hits visits"
@ -1,110 +0,0 @@
#!/usr/bin/env python3

import requests
import re
import os

from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

CLICKHOUSE_TAGS_URL = "https://api.github.com/repos/ClickHouse/ClickHouse/tags"

CLICKHOUSE_COMMON_STATIC_DOWNLOAD_URL = "https://github.com/ClickHouse/ClickHouse/releases/download/v{version}-{type}/clickhouse-common-static_{version}_amd64.deb"
CLICKHOUSE_COMMON_STATIC_DBG_DOWNLOAD_URL = "https://github.com/ClickHouse/ClickHouse/releases/download/v{version}-{type}/clickhouse-common-static-dbg_{version}_amd64.deb"
CLICKHOUSE_SERVER_DOWNLOAD_URL = "https://github.com/ClickHouse/ClickHouse/releases/download/v{version}-{type}/clickhouse-server_{version}_all.deb"
CLICKHOUSE_CLIENT_DOWNLOAD_URL = "https://github.com/ClickHouse/ClickHouse/releases/download/v{version}-{type}/clickhouse-client_{version}_amd64.deb"


CLICKHOUSE_COMMON_STATIC_PACKET_NAME = "clickhouse-common-static_{version}_amd64.deb"
CLICKHOUSE_COMMON_STATIC_DBG_PACKET_NAME = "clickhouse-common-static-dbg_{version}_amd64.deb"
CLICKHOUSE_SERVER_PACKET_NAME = "clickhouse-server_{version}_all.deb"
CLICKHOUSE_CLIENT_PACKET_NAME = "clickhouse-client_{version}_all.deb"

PACKETS_DIR = "previous_release_package_folder/"
VERSION_PATTERN = r"((?:\d+\.)?(?:\d+\.)?(?:\d+\.)?\d+-[a-zA-Z]*)"


class Version:
    def __init__(self, version):
        self.version = version

    def __lt__(self, other):
        return list(map(int, self.version.split('.'))) < list(map(int, other.version.split('.')))

    def __str__(self):
        return self.version


class ReleaseInfo:
    def __init__(self, version, release_type):
        self.version = version
        self.type = release_type


def find_previous_release(server_version, releases):
    releases.sort(key=lambda x: x.version, reverse=True)
    for release in releases:
        if release.version < server_version:
            return True, release

    return False, None


def get_previous_release(server_version):
    page = 1
    found = False
    while not found:
        response = requests.get(CLICKHOUSE_TAGS_URL, {'page': page, 'per_page': 100})
        if not response.ok:
            raise Exception('Cannot load the list of tags from github: ' + response.reason)

        releases_str = set(re.findall(VERSION_PATTERN, response.text))
        if len(releases_str) == 0:
            raise Exception('Cannot find previous release for ' + str(server_version) + ' server version')

        releases = list(map(lambda x: ReleaseInfo(Version(x.split('-')[0]), x.split('-')[1]), releases_str))
        found, previous_release = find_previous_release(server_version, releases)
        page += 1

    return previous_release


def download_packet(url, local_file_name, retries=10, backoff_factor=0.3):
    session = requests.Session()
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    response = session.get(url)
    print(url)
    if response.ok:
        open(PACKETS_DIR + local_file_name, 'wb').write(response.content)


def download_packets(release):
    if not os.path.exists(PACKETS_DIR):
        os.makedirs(PACKETS_DIR)

    download_packet(CLICKHOUSE_COMMON_STATIC_DOWNLOAD_URL.format(version=release.version, type=release.type),
                    CLICKHOUSE_COMMON_STATIC_PACKET_NAME.format(version=release.version))

    download_packet(CLICKHOUSE_COMMON_STATIC_DBG_DOWNLOAD_URL.format(version=release.version, type=release.type),
                    CLICKHOUSE_COMMON_STATIC_DBG_PACKET_NAME.format(version=release.version))

    download_packet(CLICKHOUSE_SERVER_DOWNLOAD_URL.format(version=release.version, type=release.type),
                    CLICKHOUSE_SERVER_PACKET_NAME.format(version=release.version))

    download_packet(CLICKHOUSE_CLIENT_DOWNLOAD_URL.format(version=release.version, type=release.type),
                    CLICKHOUSE_CLIENT_PACKET_NAME.format(version=release.version))


if __name__ == '__main__':
    server_version = Version(input())
    previous_release = get_previous_release(server_version)
    download_packets(previous_release)
@ -43,6 +43,7 @@ function configure()

    # we mount tests folder from repo to /usr/share
    ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
    ln -s /usr/share/clickhouse-test/ci/download_previous_release.py /usr/bin/download_previous_release

    # avoid too slow startup
    sudo cat /etc/clickhouse-server/config.d/keeper_port.xml | sed "s|<snapshot_distance>100000</snapshot_distance>|<snapshot_distance>10000</snapshot_distance>|" > /etc/clickhouse-server/config.d/keeper_port.xml.tmp

@ -173,7 +174,7 @@ install_packages package_folder

configure

./setup_minio.sh
./setup_minio.sh stateful # to have a proper environment

start

@ -262,7 +263,7 @@ echo -e "Backward compatibility check\n"

echo "Download previous release server"
mkdir previous_release_package_folder
clickhouse-client --query="SELECT version()" | ./download_previous_release && echo -e 'Download script exit code\tOK' >> /test_output/test_results.tsv \
clickhouse-client --query="SELECT version()" | download_previous_release && echo -e 'Download script exit code\tOK' >> /test_output/test_results.tsv \
    || echo -e 'Download script failed\tFAIL' >> /test_output/test_results.tsv

stop

@ -338,6 +339,7 @@ then
    -e "Code: 1000, e.code() = 111, Connection refused" \
    -e "UNFINISHED" \
    -e "Renaming unexpected part" \
    -e "PART_IS_TEMPORARILY_LOCKED" \
    /var/log/clickhouse-server/clickhouse-server.backward.clean.log | zgrep -Fa "<Error>" > /test_output/bc_check_error_messages.txt \
    && echo -e 'Backward compatibility check: Error message in clickhouse-server.log (see bc_check_error_messages.txt)\tFAIL' >> /test_output/test_results.tsv \
    || echo -e 'Backward compatibility check: No Error messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
@ -8,16 +8,16 @@ ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
    aspell \
    curl \
    git \
    libxml2-utils \
    moreutils \
    pylint \
    python3-fuzzywuzzy \
    python3-pip \
    shellcheck \
    yamllint \
    && pip3 install black boto3 codespell dohq-artifactory PyGithub unidiff
    && pip3 install black boto3 codespell dohq-artifactory PyGithub unidiff pylint==2.6.2

# Architecture of the image when BuildKit/buildx is used
ARG TARGETARCH
@ -18,6 +18,7 @@ def process_result(result_folder):
        ("typos", "typos_output.txt"),
        ("whitespaces", "whitespaces_output.txt"),
        ("workflows", "workflows_output.txt"),
        ("doc typos", "doc_spell_output.txt"),
    )

    for name, out_file in checks:
@ -11,6 +11,8 @@ echo "Check python formatting with black" | ts
./check-black -n |& tee /test_output/black_output.txt
echo "Check typos" | ts
./check-typos |& tee /test_output/typos_output.txt
echo "Check docs spelling" | ts
./check-doc-aspell |& tee /test_output/doc_spell_output.txt
echo "Check whitespaces" | ts
./check-whitespaces -n |& tee /test_output/whitespaces_output.txt
echo "Check workflows" | ts
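Every check follows the same `./check-X |& tee /test_output/X_output.txt` convention, and `process_result` (previous hunk) discovers results by file name, so adding another check is a two-line change plus one tuple entry — a hypothetical example:

```bash
# Hypothetical extra check wired into the same convention; ./check-shellcheck
# does not exist in the repo and is shown only to illustrate the pattern.
echo "Check shell scripts" | ts
./check-shellcheck |& tee /test_output/shellcheck_output.txt
# ...then add ("shellcheck", "shellcheck_output.txt") to process_result's checks tuple.
```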
@ -6,8 +6,8 @@ Minimal ClickHouse build example:

```bash
cmake .. \
    -DCMAKE_C_COMPILER=$(which clang-14) \
    -DCMAKE_CXX_COMPILER=$(which clang++-14) \
    -DCMAKE_BUILD_TYPE=Debug \
    -DENABLE_UTILS=OFF \
    -DENABLE_TESTS=OFF
```

(The hunk replaces the `clang-13` compiler paths with `clang-14` in the first two flags.)
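The snippet above only configures the build; the corresponding compile step, consistent with the ninja usage elsewhere in these docs, would be something like:

```bash
# Assumes the Ninja generator; pass -G Ninja to the cmake call above if needed.
ninja clickhouse-server clickhouse-client
```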
@ -138,7 +138,7 @@ It's important to name tests correctly, so one could turn some tests subset off

| Tester flag| What should be in test name | When flag should be added |
|---|---|---|
| `--[no-]zookeeper`| "zookeeper" or "replica" | Test uses tables from ReplicatedMergeTree family |
| `--[no-]zookeeper`| "zookeeper" or "replica" | Test uses tables from `ReplicatedMergeTree` family |
| `--[no-]shard` | "shard" or "distributed" or "global"| Test using connections to 127.0.0.2 or similar |
| `--[no-]long` | "long" or "deadlock" or "race" | Test runs longer than 60 seconds |
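Given that naming scheme, the tester flags compose directly — a hypothetical invocation that skips ZooKeeper-dependent and long tests while keeping shard tests:

```bash
# Flags taken from the table above; combine to carve out a test subset.
./clickhouse-test --no-zookeeper --no-long --shard
```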
@ -149,7 +149,7 @@ The server implements several different interfaces:
- A TCP interface for the native ClickHouse client and for cross-server communication during distributed query execution.
- An interface for transferring data for replication.

Internally, it is just a primitive multithreaded server without coroutines or fibers. Since the server is not designed to process a high rate of simple queries but to process a relatively low rate of complex queries, each of them can process a vast amount of data for analytics.
Internally, it is just a primitive multithread server without coroutines or fibers. Since the server is not designed to process a high rate of simple queries but to process a relatively low rate of complex queries, each of them can process a vast amount of data for analytics.

The server initializes the `Context` class with the necessary environment for query execution: the list of available databases, users and access rights, settings, clusters, the process list, the query log, and so on. Interpreters use this environment.

@ -178,7 +178,7 @@ To execute queries and do side activities ClickHouse allocates threads from one

Server pool is a `Poco::ThreadPool` class instance defined in `Server::main()` method. It can have at most `max_connection` threads. Every thread is dedicated to a single active connection.

Global thread pool is `GlobalThreadPool` singleton class. To allocate thread from it `ThreadFromGlobalPool` is used. It has an interface similar to `std::thread`, but pulls thread from the global pool and does all necessary initializations. It is configured with the following settings:
Global thread pool is `GlobalThreadPool` singleton class. To allocate thread from it `ThreadFromGlobalPool` is used. It has an interface similar to `std::thread`, but pulls thread from the global pool and does all necessary initialization. It is configured with the following settings:
* `max_thread_pool_size` - limit on thread count in pool.
* `max_thread_pool_free_size` - limit on idle thread count waiting for new jobs.
* `thread_pool_queue_size` - limit on scheduled job count.

@ -189,7 +189,7 @@ IO thread pool is implemented as a plain `ThreadPool` accessible via `IOThreadPo

For periodic task execution there is `BackgroundSchedulePool` class. You can register tasks using `BackgroundSchedulePool::TaskHolder` objects and the pool ensures that no task runs two jobs at the same time. It also allows you to postpone task execution to a specific instant in the future or temporarily deactivate task. Global `Context` provides a few instances of this class for different purposes. For general purpose tasks `Context::getSchedulePool()` is used.

There are also specialized thread pools for preemptable tasks. Such `IExecutableTask` task can be split into ordered sequence of jobs, called steps. To schedule these tasks in a manner allowing short tasks to be prioritied over long ones `MergeTreeBackgroundExecutor` is used. As name suggests it is used for background MergeTree related operations such as merges, mutations, fetches and moves. Pool instances are available using `Context::getCommonExecutor()` and other similar methods.
There are also specialized thread pools for preemptable tasks. Such `IExecutableTask` task can be split into ordered sequence of jobs, called steps. To schedule these tasks in a manner allowing short tasks to be prioritized over long ones `MergeTreeBackgroundExecutor` is used. As name suggests it is used for background MergeTree related operations such as merges, mutations, fetches and moves. Pool instances are available using `Context::getCommonExecutor()` and other similar methods.

No matter what pool is used for a job, at start `ThreadStatus` instance is created for this job. It encapsulates all per-thread information: thread id, query id, performance counters, resource consumption and many other useful data. Job can access it via thread local pointer by `CurrentThread::get()` call, so we do not need to pass it to every function.

@ -201,7 +201,7 @@ Servers in a cluster setup are mostly independent. You can create a `Distributed

Things become more complicated when you have subqueries in IN or JOIN clauses, and each of them uses a `Distributed` table. We have different strategies for the execution of these queries.

There is no global query plan for distributed query execution. Each node has its local query plan for its part of the job. We only have simple one-pass distributed query execution: we send queries for remote nodes and then merge the results. But this is not feasible for complicated queries with high cardinality GROUP BYs or with a large amount of temporary data for JOIN. In such cases, we need to “reshuffle” data between servers, which requires additional coordination. ClickHouse does not support that kind of query execution, and we need to work on it.
There is no global query plan for distributed query execution. Each node has its local query plan for its part of the job. We only have simple one-pass distributed query execution: we send queries for remote nodes and then merge the results. But this is not feasible for complicated queries with high cardinality `GROUP BY`s or with a large amount of temporary data for JOIN. In such cases, we need to “reshuffle” data between servers, which requires additional coordination. ClickHouse does not support that kind of query execution, and we need to work on it.

## Merge Tree {#merge-tree}
@ -10,7 +10,7 @@ This is intended for continuous integration checks that run on Linux servers.

The cross-build for AARCH64 is based on the [Build instructions](../development/build.md), follow them first.

## Install Clang-13
## Install Clang-14 or newer

Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup or do
```

@ -31,7 +31,7 @@ tar xJf gcc-arm-8.3-2019.03-x86_64-aarch64-linux-gnu.tar.xz -C build-aarch64/cma
``` bash
cd ClickHouse
mkdir build-arm64
CC=clang-13 CXX=clang++-13 cmake . -Bbuild-arm64 -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-aarch64.cmake
CC=clang-14 CXX=clang++-14 cmake . -Bbuild-arm64 -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-aarch64.cmake
ninja -C build-arm64
```
@ -10,14 +10,14 @@ This is intended for continuous integration checks that run on Linux servers. If

The cross-build for Mac OS X is based on the [Build instructions](../development/build.md), follow them first.

## Install Clang-13
## Install Clang-14

Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
For example the commands for Bionic are like:

``` bash
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-13 main" >> /etc/apt/sources.list
sudo apt-get install clang-13
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-14 main" >> /etc/apt/sources.list
sudo apt-get install clang-14
```

## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
@ -23,7 +23,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
``` bash
cd ClickHouse
mkdir build-riscv64
CC=clang-13 CXX=clang++-13 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_ORC=OFF -DUSE_UNWIND=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF
CC=clang-14 CXX=clang++-14 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_ORC=OFF -DUSE_UNWIND=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF
ninja -C build-riscv64
```
@ -20,9 +20,9 @@ It is also possible to compile with Apple's XCode `apple-clang` or Homebrew's `g

First install [Homebrew](https://brew.sh/)

## For Apple's Clang (discouraged): Install Xcode and Command Line Tools {#install-xcode-and-command-line-tools}
## For Apple's Clang (discouraged): Install XCode and Command Line Tools {#install-xcode-and-command-line-tools}

Install the latest [Xcode](https://apps.apple.com/am/app/xcode/id497799835?mt=12) from App Store.
Install the latest [XCode](https://apps.apple.com/am/app/xcode/id497799835?mt=12) from App Store.

Open it at least once to accept the end-user license agreement and automatically install the required components.

@ -62,7 +62,7 @@ cmake --build build
# The resulting binary will be created at: build/programs/clickhouse
```

To build using Xcode's native AppleClang compiler in Xcode IDE (this option is only for development builds and workflows, and is **not recommended** unless you know what you are doing):
To build using XCode native AppleClang compiler in XCode IDE (this option is only for development builds and workflows, and is **not recommended** unless you know what you are doing):

``` bash
cd ClickHouse

@ -71,7 +71,7 @@ mkdir build
cd build
XCODE_IDE=1 ALLOW_APPLECLANG=1 cmake -G Xcode -DCMAKE_BUILD_TYPE=Debug -DENABLE_JEMALLOC=OFF ..
cmake --open .
# ...then, in Xcode IDE select ALL_BUILD scheme and start the building process.
# ...then, in XCode IDE select ALL_BUILD scheme and start the building process.
# The resulting binary will be created at: ./programs/Debug/clickhouse
```
@ -91,7 +91,7 @@ cmake --build build

## Caveats {#caveats}

If you intend to run `clickhouse-server`, make sure to increase the system’s maxfiles variable.
If you intend to run `clickhouse-server`, make sure to increase the system’s `maxfiles` variable.

:::note
You’ll need to use sudo.
@ -19,7 +19,7 @@ The following tutorial is based on the Ubuntu Linux system. With appropriate cha
### Install Git, CMake, Python and Ninja {#install-git-cmake-python-and-ninja}

``` bash
sudo apt-get install git cmake python ninja-build
sudo apt-get install git cmake ccache python3 ninja-build
```

Or cmake3 instead of cmake on older systems.

@ -77,7 +77,7 @@ The build requires the following components:
- Git (is used only to checkout the sources, it’s not needed for the build)
- CMake 3.14 or newer
- Ninja
- C++ compiler: clang-13 or newer
- C++ compiler: clang-14 or newer
- Linker: lld

If all the components are installed, you may build in the same way as the steps above.
@ -19,7 +19,7 @@ cmake .. \

## CMake files types

1. ClickHouse's source CMake files (located in the root directory and in /src).
1. ClickHouse source CMake files (located in the root directory and in /src).
2. Arch-dependent CMake files (located in /cmake/*os_name*).
3. Libraries finders (search for contrib libraries, located in /contrib/*/CMakeLists.txt).
4. Contrib build CMake files (used instead of libraries' own CMake files, located in /cmake/modules)

@ -456,7 +456,7 @@ option(ENABLE_TESTS "Provide unit_test_dbms target with Google.test unit tests"

#### If the option's state could produce unwanted (or unusual) result, explicitly warn the user.

Suppose you have an option that may strip debug symbols from the ClickHouse's part.
Suppose you have an option that may strip debug symbols from the ClickHouse part.
This can speed up the linking process, but produces a binary that cannot be debugged.
In that case, prefer explicitly raising a warning telling the developer that he may be doing something wrong.
Also, such options should be disabled if applies.
@ -31,7 +31,7 @@ If you are not sure what to do, ask a maintainer for help.
## Merge With Master

Verifies that the PR can be merged to master. If not, it will fail with the
message 'Cannot fetch mergecommit'. To fix this check, resolve the conflict as
message `Cannot fetch mergecommit`. To fix this check, resolve the conflict as
described in the [GitHub
documentation](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/resolving-a-merge-conflict-on-github),
or merge the `master` branch to your pull request branch using git.

@ -57,7 +57,7 @@ You have to specify a changelog category for your change (e.g., Bug Fix), and
write a user-readable message describing the change for [CHANGELOG.md](../whats-new/changelog/)


## Push To Dockerhub
## Push To DockerHub

Builds docker images used for build and tests, then pushes them to DockerHub.

@ -118,7 +118,7 @@ Builds ClickHouse in various configurations for use in further steps. You have t
- **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures e.g. `clang-10-freebsd`).
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
- **Splitted** `splitted` is a [split build](../development/build.md#split-build)
- **Split** `splitted` is a [split build](../development/build.md#split-build)
- **Status**: `success` or `fail`
- **Build log**: link to the building and files copying log, useful when build failed.
- **Build time**.
@ -96,9 +96,9 @@ SELECT library_name, license_type, license_path FROM system.licenses ORDER BY li

## Adding new third-party libraries and maintaining patches in third-party libraries {#adding-third-party-libraries}

1. Each third-party libary must reside in a dedicated directory under the `contrib/` directory of the ClickHouse repository. Avoid dumps/copies of external code, instead use Git's submodule feature to pull third-party code from an external upstream repository.
2. Submodules are listed in `.gitmodule`. If the external library can be used as-is, you may reference the upstream repository directly. Otherwise, i.e. the external libary requires patching/customization, create a fork of the official repository in the [Clickhouse organization in GitHub](https://github.com/ClickHouse).
1. Each third-party library must reside in a dedicated directory under the `contrib/` directory of the ClickHouse repository. Avoid dumps/copies of external code, instead use Git submodule feature to pull third-party code from an external upstream repository.
2. Submodules are listed in `.gitmodule`. If the external library can be used as-is, you may reference the upstream repository directly. Otherwise, i.e. the external library requires patching/customization, create a fork of the official repository in the [Clickhouse organization in GitHub](https://github.com/ClickHouse).
3. In the latter case, create a branch with `clickhouse/` prefix from the branch you want to integrate, e.g. `clickhouse/master` (for `master`) or `clickhouse/release/vX.Y.Z` (for a `release/vX.Y.Z` tag). The purpose of this branch is to isolate customization of the library from upstream work. For example, pulls from the upstream repository into the fork will leave all `clickhouse/` branches unaffected. Submodules in `contrib/` must only track `clickhouse/` branches of forked third-party repositories.
4. To patch a fork of a third-party library, create a dedicated branch with `clickhouse/` prefix in the fork, e.g. `clickhouse/fix-some-desaster`. Finally, merge the patch branch into the custom tracking branch (e.g. `clickhouse/master` or `clickhouse/release/vX.Y.Z`) using a PR.
5. Always create patches of third-party libraries with the official repository in mind. Once a PR of a patch branch to the `clickhouse/` branch in the fork repository is done and the submodule version in ClickHouse's official repository is bumped, consider opening another PR from the patch branch to the upstream library repository. This ensures, that 1) the contribution has more than a single use case and importance, 2) others will also benefit from it, 3) the change will not remain a maintenance burden solely on ClickHouse developers.
5. Always create patches of third-party libraries with the official repository in mind. Once a PR of a patch branch to the `clickhouse/` branch in the fork repository is done and the submodule version in ClickHouse official repository is bumped, consider opening another PR from the patch branch to the upstream library repository. This ensures, that 1) the contribution has more than a single use case and importance, 2) others will also benefit from it, 3) the change will not remain a maintenance burden solely on ClickHouse developers.
9. To update a submodule with changes in the upstream repository, first merge upstream `master` (or a new `versionX.Y.Z` tag) into the `clickhouse`-tracking branch in the fork repository. Conflicts with patches/customization will need to be resolved in this merge (see Step 4.). Once the merge is done, bump the submodule in ClickHouse to point to the new hash in the fork.
@ -70,7 +70,7 @@ You can also clone the repository via https protocol:

This, however, will not let you send your changes to the server. You can still use it temporarily and add the SSH keys later replacing the remote address of the repository with `git remote` command.

You can also add original ClickHouse repo’s address to your local repository to pull updates from there:
You can also add original ClickHouse repo address to your local repository to pull updates from there:

    git remote add upstream git@github.com:ClickHouse/ClickHouse.git

@ -155,7 +155,7 @@ While inside the `build` directory, configure your build by running CMake. Befor
    export CC=clang CXX=clang++
    cmake ..

If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-13 CXX=clang++-13`. The clang version will be in the script output.
If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-14 CXX=clang++-14`. The clang version will be in the script output.

The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building.

@ -177,7 +177,7 @@ If you require to build all the binaries (utilities and tests), you should run n

Full build requires about 30GB of free disk space or 15GB to build the main binaries.

When a large amount of RAM is available on build machine you should limit the number of build tasks run in parallel with `-j` param:
When a large amount of RAM is available on build machine you should limit the number of build tasks run in parallel with `-j` parameter:

    ninja -j 1 clickhouse-server clickhouse-client

@ -269,7 +269,7 @@ Developing ClickHouse often requires loading realistic datasets. It is particula

Navigate to your fork repository in GitHub’s UI. If you have been developing in a branch, you need to select that branch. There will be a “Pull request” button located on the screen. In essence, this means “create a request for accepting my changes into the main repository”.

A pull request can be created even if the work is not completed yet. In this case please put the word “WIP” (work in progress) at the beginning of the title, it can be changed later. This is useful for cooperative reviewing and discussion of changes as well as for running all of the available tests. It is important that you provide a brief description of your changes, it will later be used for generating release changelogs.
A pull request can be created even if the work is not completed yet. In this case please put the word “WIP” (work in progress) at the beginning of the title, it can be changed later. This is useful for cooperative reviewing and discussion of changes as well as for running all of the available tests. It is important that you provide a brief description of your changes, it will later be used for generating release changelog.

Testing will commence as soon as ClickHouse employees label your PR with a tag “can be tested”. The results of some first checks (e.g. code style) will come in within several minutes. Build check results will arrive within half an hour. And the main set of tests will report itself within an hour.
@ -3,6 +3,6 @@ sidebar_label: Development
sidebar_position: 58
---

# ClickHouse Development {#clickhouse-development}
# ClickHouse Development

[Original article](https://clickhouse.com/docs/en/development/) <!--hide-->
@ -2,7 +2,7 @@

Rust library integration will be described based on BLAKE3 hash-function integration.

The first step is forking a library and making neccessary changes for Rust and C/C++ compatibility.
The first step is forking a library and making necessary changes for Rust and C/C++ compatibility.

After forking library repository you need to change target settings in Cargo.toml file. Firstly, you need to switch build to static library. Secondly, you need to add cbindgen crate to the crate list. We will use it later to generate C-header automatically.

@ -51,9 +51,9 @@ pub unsafe extern "C" fn blake3_apply_shim(
}
```

This method gets C-compatible string, its size and output string pointer as input. Then, it converts C-compatible inputs into types that are used by actual library methods and calls them. After that, it should convert library methods' outputs back into C-compatible type. In that particular case library supported direct writing into pointer by method fill(), so the convertion was not needed. The main advice here is to create less methods, so you will need to do less convertions on each method call and won't create much overhead.
This method gets C-compatible string, its size and output string pointer as input. Then, it converts C-compatible inputs into types that are used by actual library methods and calls them. After that, it should convert library methods' outputs back into C-compatible type. In that particular case library supported direct writing into pointer by method fill(), so the conversion was not needed. The main advice here is to create less methods, so you will need to do less conversions on each method call and won't create much overhead.

Also, you should use attribute #[no_mangle] and extern "C" for every C-compatible attribute. Without it library can compile incorrectly and cbindgen won't launch header autogeneration.
Also, you should use attribute #[no_mangle] and `extern "C"` for every C-compatible attribute. Without it library can compile incorrectly and cbindgen won't launch header autogeneration.

After all these steps you can test your library in a small project to find all problems with compatibility or header generation. If any problems occur during header generation, you can try to configure it with cbindgen.toml file (you can find an example of it in BLAKE3 directory or a template here: [https://github.com/eqrion/cbindgen/blob/master/template.toml](https://github.com/eqrion/cbindgen/blob/master/template.toml)). If everything works correctly, you can finally integrate its methods into ClickHouse.
@ -196,7 +196,7 @@ std::cerr << static_cast<int>(c) << std::endl;
|
||||
|
||||
The same is true for small methods in any classes or structs.
|
||||
|
||||
For templated classes and structs, do not separate the method declarations from the implementation (because otherwise they must be defined in the same translation unit).
|
||||
For template classes and structs, do not separate the method declarations from the implementation (because otherwise they must be defined in the same translation unit).
|
||||
|
||||
**31.** You can wrap lines at 140 characters, instead of 80.
|
||||
|
||||
@ -285,7 +285,7 @@ Note: You can use Doxygen to generate documentation from these comments. But Dox
|
||||
/// WHAT THE FAIL???
|
||||
```
|
||||
|
||||
**14.** Do not use comments to make delimeters.
|
||||
**14.** Do not use comments to make delimiters.
|
||||
|
||||
``` cpp
|
||||
///******************************************************
@ -491,7 +491,7 @@ if (0 != close(fd))
throwFromErrno("Cannot close file " + file_name, ErrorCodes::CANNOT_CLOSE_FILE);
```
You can use assert to check invariants in code.
You can use assert to check invariant in code.
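For context, a minimal sketch (with a hypothetical function) of an assert that guards an invariant rather than validating external input:

``` cpp
#include <cassert>
#include <vector>

int takeLast(const std::vector<int> & values)
{
    /// Invariant: every caller guarantees a non-empty vector.
    assert(!values.empty());
    return values.back();
}
```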
**4.** Exception types.
@ -552,9 +552,9 @@ Do not try to implement lock-free data structures unless it is your primary area
In most cases, prefer references.
**10.** const.
**10.** `const`.
Use constant references, pointers to constants, `const_iterator`, and const methods.
Use constant references, pointers to constants, `const_iterator`, and `const` methods.
Consider `const` to be default and use non-`const` only when necessary.
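A small sketch of this const-by-default style, with a hypothetical function:

``` cpp
#include <iostream>
#include <string>
#include <vector>

void printNames(const std::vector<std::string> & names) /// constant reference
{
    for (const auto & name : names) /// const references to elements
        std::cout << name << '\n';
}
```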
@ -596,7 +596,7 @@ public:
AggregateFunctionPtr get(const String & name, const DataTypes & argument_types) const;
```
**15.** namespace.
**15.** `namespace`.
There is no need to use a separate `namespace` for application code.
@ -606,7 +606,7 @@ For medium to large libraries, put everything in a `namespace`.
In the library’s `.h` file, you can use `namespace detail` to hide implementation details not needed for the application code.
In a `.cpp` file, you can use a `static` or anonymous namespace to hide symbols.
In a `.cpp` file, you can use a `static` or anonymous `namespace` to hide symbols.
Also, a `namespace` can be used for an `enum` to prevent the corresponding names from falling into an external `namespace` (but it’s better to use an `enum class`).
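For context, a short sketch combining these points, with hypothetical names:

``` cpp
namespace
{
    /// Hidden in an anonymous namespace: not visible outside this .cpp file.
    int twice(int x) { return x * 2; }
}

/// An enum class keeps Red and Green from leaking into the enclosing scope.
enum class Color
{
    Red,
    Green,
};
```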
@ -81,11 +81,11 @@ $ ./src/unit_tests_dbms --gtest_filter=LocalAddress*
## Performance Tests {#performance-tests}
Performance tests allow to measure and compare performance of some isolated part of ClickHouse on synthetic queries. Tests are located at `tests/performance`. Each test is represented by `.xml` file with description of test case. Tests are run with `docker/tests/performance-comparison` tool . See the readme file for invocation.
Performance tests allow to measure and compare performance of some isolated part of ClickHouse on synthetic queries. Tests are located at `tests/performance`. Each test is represented by `.xml` file with description of test case. Tests are run with `docker/test/performance-comparison` tool . See the readme file for invocation.
Each test runs one or multiple queries (possibly with combinations of parameters) in a loop.
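For context, a minimal sketch of such an `.xml` test case; real files in `tests/performance` often use more elements (substitutions, preconditions), so treat this schema as an assumption and check existing tests for details:

``` xml
<test>
    <query>SELECT count() FROM numbers(10000000) WHERE number % 7 = 3</query>
</test>
```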
If you want to improve performance of ClickHouse in some scenario, and if improvements can be observed on simple queries, it is highly recommended to write a performance test. It always makes sense to use `perf top` or other perf tools during your tests.
If you want to improve performance of ClickHouse in some scenario, and if improvements can be observed on simple queries, it is highly recommended to write a performance test. It always makes sense to use `perf top` or other `perf` tools during your tests.
## Test Tools and Scripts {#test-tools-and-scripts}
@ -244,7 +244,7 @@ In debug build we also involve a customization of libc that ensures that no "har
Debug assertions are used extensively.
In debug build, if exception with "logical error" code (implies a bug) is being thrown, the program is terminated prematurally. It allows to use exceptions in release build but make it an assertion in debug build.
In debug build, if exception with "logical error" code (implies a bug) is being thrown, the program is terminated prematurely. It allows to use exceptions in release build but make it an assertion in debug build.
Debug version of jemalloc is used for debug builds.
Debug version of libc++ is used for debug builds.
@ -253,7 +253,7 @@ Debug version of libc++ is used for debug builds.
Data stored on disk is checksummed. Data in MergeTree tables is checksummed in three ways simultaneously* (compressed data blocks, uncompressed data blocks, the total checksum across blocks). Data transferred over network between client and server or between servers is also checksummed. Replication ensures bit-identical data on replicas.
It is required to protect from faulty hardware (bit rot on storage media, bit flips in RAM on server, bit flips in RAM of network controller, bit flips in RAM of network switch, bit flips in RAM of client, bit flips on the wire). Note that bit flips are common and likely to occur even for ECC RAM and in presense of TCP checksums (if you manage to run thousands of servers processing petabytes of data each day). [See the video (russian)](https://www.youtube.com/watch?v=ooBAQIe0KlQ).
It is required to protect from faulty hardware (bit rot on storage media, bit flips in RAM on server, bit flips in RAM of network controller, bit flips in RAM of network switch, bit flips in RAM of client, bit flips on the wire). Note that bit flips are common and likely to occur even for ECC RAM and in presence of TCP checksums (if you manage to run thousands of servers processing petabytes of data each day). [See the video (russian)](https://www.youtube.com/watch?v=ooBAQIe0KlQ).
ClickHouse provides diagnostics that will help ops engineers to find faulty hardware.
@ -4,7 +4,7 @@ toc_priority: 27
toc_title: Introduction
---
# Database Engines {#database-engines}
# Database Engines
Database engines allow you to work with tables. By default, ClickHouse uses the [Atomic](../../engines/database-engines/atomic.md) database engine, which provides configurable [table engines](../../engines/table-engines/index.md) and an [SQL dialect](../../sql-reference/syntax.md).
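For context, a sketch of how the default applies; the database names are illustrative:

``` sql
CREATE DATABASE db_default;                  -- uses Atomic implicitly
CREATE DATABASE db_explicit ENGINE = Atomic; -- the same, spelled out
```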
@ -3,7 +3,7 @@ sidebar_label: Lazy
sidebar_position: 20
---
# Lazy {#lazy}
# Lazy
Keeps tables in RAM only `expiration_time_in_seconds` seconds after last access. Can be used only with \*Log tables.
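For context, an illustrative creation statement; the name and expiration time are placeholders:

``` sql
CREATE DATABASE testlazy ENGINE = Lazy(60); -- tables leave RAM 60 seconds after last access
```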
@ -3,7 +3,7 @@ sidebar_label: MaterializedPostgreSQL
sidebar_position: 60
---
# [experimental] MaterializedPostgreSQL {#materialize-postgresql}
# [experimental] MaterializedPostgreSQL
Creates a ClickHouse database with tables from PostgreSQL database. Firstly, database with engine `MaterializedPostgreSQL` creates a snapshot of PostgreSQL database and loads required tables. Required tables can include any subset of tables from any subset of schemas from specified database. Along with the snapshot database engine acquires LSN and once initial dump of tables is performed - it starts pulling updates from WAL. After database is created, newly added tables to PostgreSQL database are not automatically added to replication. They have to be added manually with `ATTACH TABLE db.table` query.
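For context, an illustrative setup; the host, credentials, and object names are placeholders:

``` sql
CREATE DATABASE pg_mirror
ENGINE = MaterializedPostgreSQL('postgres1:5432', 'postgres_db', 'user', 'password');

-- A table created in PostgreSQL afterwards must be attached manually:
ATTACH TABLE pg_mirror.new_table;
```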
@ -3,7 +3,7 @@ sidebar_position: 40
sidebar_label: PostgreSQL
---
# PostgreSQL {#postgresql}
# PostgreSQL
Allows to connect to databases on a remote [PostgreSQL](https://www.postgresql.org) server. Supports read and write operations (`SELECT` and `INSERT` queries) to exchange data between ClickHouse and PostgreSQL.
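For context, an illustrative creation statement with placeholder connection parameters:

``` sql
CREATE DATABASE pg_db
ENGINE = PostgreSQL('postgres1:5432', 'postgres_db', 'user', 'password');
```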
@ -3,7 +3,7 @@ sidebar_position: 30
sidebar_label: Replicated
---
# [experimental] Replicated {#replicated}
# [experimental] Replicated
The engine is based on the [Atomic](../../engines/database-engines/atomic.md) engine. It supports replication of metadata via DDL log being written to ZooKeeper and executed on all of the replicas for a given database.
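For context, an illustrative creation statement; the ZooKeeper path, shard name, and replica name are placeholders:

``` sql
CREATE DATABASE repl_db
ENGINE = Replicated('/clickhouse/databases/repl_db', 'shard1', 'replica1');
```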
@ -3,7 +3,7 @@ sidebar_position: 55
sidebar_label: SQLite
---
# SQLite {#sqlite}
# SQLite
Allows to connect to [SQLite](https://www.sqlite.org/index.html) database and perform `INSERT` and `SELECT` queries to exchange data between ClickHouse and SQLite.
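For context, an illustrative creation statement; the argument is a path to an SQLite database file, here a placeholder:

``` sql
CREATE DATABASE sqlite_db ENGINE = SQLite('sqlite.db');
```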
@ -4,7 +4,7 @@ toc_priority: 26
toc_title: Introduction
---
# Table Engines {#table_engines}
# Table Engines
The table engine (type of table) determines:
@ -12,7 +12,7 @@ The table engine (type of table) determines:
- Which queries are supported, and how.
- Concurrent data access.
- Use of indexes, if present.
- Whether multithreaded request execution is possible.
- Whether multithread request execution is possible.
- Data replication parameters.
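For context, a sketch showing where the engine is chosen; the table and its columns are illustrative:

``` sql
CREATE TABLE hits
(
    dt DateTime,
    url String
)
ENGINE = MergeTree
ORDER BY dt;
```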
## Engine Families {#engine-families}
@ -3,7 +3,7 @@ sidebar_position: 12
sidebar_label: ExternalDistributed
---
# ExternalDistributed {#externaldistributed}
# ExternalDistributed
The `ExternalDistributed` engine allows to perform `SELECT` queries on data that is stored on remote MySQL or PostgreSQL servers. Accepts [MySQL](../../../engines/table-engines/integrations/mysql.md) or [PostgreSQL](../../../engines/table-engines/integrations/postgresql.md) engines as an argument so sharding is possible.
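For context, an illustrative table definition; the shard list and credentials are placeholders:

``` sql
CREATE TABLE ext_table
(
    id UInt32,
    name String
)
ENGINE = ExternalDistributed('PostgreSQL', 'host1:5432,host2:5432', 'postgres_db', 'table1', 'user', 'password');
```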