mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-23 08:02:02 +00:00

commit cb30e78bec
Merge branch 'master' into issue_#50808

.github/workflows/master.yml (vendored, 259 lines changed)
@@ -850,6 +850,48 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+  BuilderBinRISCV64:
+    needs: [DockerHubPush]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          BUILD_NAME=binary_riscv64
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v3
+        with:
+          name: changed_images
+          path: ${{ env.IMAGES_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          submodules: true
+          fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Build
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+      - name: Upload build URLs to artifacts
+        if: ${{ success() || failure() }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.BUILD_URLS }}
+          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
 ############################################################################################
 ##################################### Docker images #######################################
 ############################################################################################
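A note on the `cat >> "$GITHUB_ENV" << 'EOF'` pattern in the new job: GitHub Actions exposes `GITHUB_ENV` as a file path, and every `NAME=value` line appended to it becomes an environment variable for the remaining steps of the job; the quoted `'EOF'` keeps the shell from expanding anything, since `${{runner.temp}}` is substituted earlier by the Actions template engine. A minimal local sketch of the mechanism (the `mktemp` stand-in is hypothetical; on a real runner the file is provided):

```bash
#!/usr/bin/env bash
# Stand-in for the file GitHub Actions provides via $GITHUB_ENV.
GITHUB_ENV=$(mktemp)

# One step appends NAME=value lines, like the "Set envs" step above.
cat >> "$GITHUB_ENV" << 'EOF'
BUILD_NAME=binary_riscv64
EOF

# Between steps the runner reads the file and exports each line;
# a later step then sees the variable:
set -a                 # auto-export everything sourced below
source "$GITHUB_ENV"
set +a
echo "$BUILD_NAME"     # -> binary_riscv64
```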
@@ -932,6 +974,7 @@ jobs:
       - BuilderBinDarwinAarch64
       - BuilderBinFreeBSD
       - BuilderBinPPC64
+      - BuilderBinRISCV64
       - BuilderBinAmd64Compat
       - BuilderBinAarch64V80Compat
       - BuilderBinClangTidy
@@ -2827,6 +2870,216 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan0:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=0
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan1:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=1
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan2:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=2
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan3:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=3
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan4:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=4
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan5:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=5
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
   IntegrationTestsTsan0:
     needs: [BuilderDebTsan]
     runs-on: [self-hosted, stress-tester]
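The six jobs above are identical except for `RUN_BY_HASH_NUM`; the `RUN_BY_HASH_NUM`/`RUN_BY_HASH_TOTAL` pair tells `integration_test_check.py` which shard of the integration test suite this job should run. A sketch of hash-bucket sharding of this kind (the bucketing function and test names are illustrative assumptions, not the script's actual hash):

```bash
#!/usr/bin/env bash
# Hypothetical illustration: each of the six jobs keeps only the tests
# whose name hashes into its own bucket, so the shards are disjoint and
# together cover the whole suite.
RUN_BY_HASH_NUM=0     # differs per job: 0..5
RUN_BY_HASH_TOTAL=6

for test in test_storage_s3 test_backup_restore test_merge_tree_s3; do
    crc=$(printf '%s' "$test" | cksum | cut -d' ' -f1)
    if [ $((crc % RUN_BY_HASH_TOTAL)) -eq "$RUN_BY_HASH_NUM" ]; then
        echo "shard $RUN_BY_HASH_NUM runs: $test"
    fi
done
```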
@@ -3920,6 +4173,12 @@ jobs:
       - IntegrationTestsAsan3
       - IntegrationTestsAsan4
       - IntegrationTestsAsan5
+      - IntegrationTestsAnalyzerAsan0
+      - IntegrationTestsAnalyzerAsan1
+      - IntegrationTestsAnalyzerAsan2
+      - IntegrationTestsAnalyzerAsan3
+      - IntegrationTestsAnalyzerAsan4
+      - IntegrationTestsAnalyzerAsan5
       - IntegrationTestsRelease0
       - IntegrationTestsRelease1
       - IntegrationTestsRelease2
.github/workflows/nightly.yml (vendored, 52 lines changed)
@@ -75,54 +75,7 @@ jobs:
   Codebrowser:
     needs: [DockerHubPush]
     uses: ./.github/workflows/woboq.yml
-  BuilderCoverity:
-    needs: DockerHubPush
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          BUILD_NAME=coverity
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          TEMP_PATH=${{runner.temp}}/build_check
-          EOF
-          echo "COVERITY_TOKEN=${{ secrets.COVERITY_TOKEN }}" >> "$GITHUB_ENV"
-      - name: Download changed images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          submodules: true
-      - name: Build
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-      - name: Upload Coverity Analysis
-        if: ${{ success() || failure() }}
-        run: |
-          curl --form token="${COVERITY_TOKEN}" \
-            --form email='security+coverity@clickhouse.com' \
-            --form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.gz" \
-            --form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
-            --form description="Nighly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
-            https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
   SonarCloud:
-    # TODO: Remove if: whenever SonarCloud supports c++23
-    if: ${{ false }}
     runs-on: [self-hosted, builder]
     env:
       SONAR_SCANNER_VERSION: 4.8.0.2856
@@ -159,7 +112,7 @@ jobs:
       - name: Set Up Build Tools
         run: |
          sudo apt-get update
-          sudo apt-get install -yq git cmake ccache ninja-build python3 yasm
+          sudo apt-get install -yq git cmake ccache ninja-build python3 yasm nasm
           sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
       - name: Run build-wrapper
         run: |
@@ -178,4 +131,5 @@ jobs:
           --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \
           --define sonar.projectKey="ClickHouse_ClickHouse" \
           --define sonar.organization="clickhouse-java" \
-          --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql" \
+          --define sonar.cfamily.cpp23.enabled=true \
+          --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql"
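For context, this hunk edits only the tail of the scanner invocation inside the SonarCloud job; assembled, the step plausibly runs something like the sketch below. The `sonar-scanner` command name and the plain-shell form of the output-directory variable are assumptions — the hunk shows only the trailing `--define` lines.

```bash
# Assumed shape of the full scan command; only the --define lines below
# are visible in the hunk above, the rest is illustrative.
sonar-scanner \
    --define sonar.cfamily.build-wrapper-output="${BUILD_WRAPPER_OUT_DIR}" \
    --define sonar.projectKey="ClickHouse_ClickHouse" \
    --define sonar.organization="clickhouse-java" \
    --define sonar.cfamily.cpp23.enabled=true \
    --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql"
```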
.github/workflows/pull_request.yml (vendored, 258 lines changed)
@@ -911,6 +911,47 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+  BuilderBinRISCV64:
+    needs: [DockerHubPush, FastTest, StyleCheck]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          BUILD_NAME=binary_riscv64
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v3
+        with:
+          name: changed_images
+          path: ${{ env.IMAGES_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          submodules: true
+      - name: Build
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+      - name: Upload build URLs to artifacts
+        if: ${{ success() || failure() }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.BUILD_URLS }}
+          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
 ############################################################################################
 ##################################### Docker images #######################################
 ############################################################################################
@@ -992,6 +1033,7 @@ jobs:
       - BuilderBinDarwinAarch64
       - BuilderBinFreeBSD
       - BuilderBinPPC64
+      - BuilderBinRISCV64
       - BuilderBinAmd64Compat
       - BuilderBinAarch64V80Compat
       - BuilderBinClangTidy
@@ -3861,6 +3903,216 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan0:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=0
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan1:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=1
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan2:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=2
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan3:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=3
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan4:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=4
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
+  IntegrationTestsAnalyzerAsan5:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, stress-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/integration_tests_asan
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=Integration tests (asan, analyzer)
+          REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse
+          RUN_BY_HASH_NUM=5
+          RUN_BY_HASH_TOTAL=6
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Integration test
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 integration_test_check.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
   IntegrationTestsTsan0:
     needs: [BuilderDebTsan]
     runs-on: [self-hosted, stress-tester]
@@ -4847,6 +5099,12 @@ jobs:
       - IntegrationTestsAsan3
       - IntegrationTestsAsan4
       - IntegrationTestsAsan5
+      - IntegrationTestsAnalyzerAsan0
+      - IntegrationTestsAnalyzerAsan1
+      - IntegrationTestsAnalyzerAsan2
+      - IntegrationTestsAnalyzerAsan3
+      - IntegrationTestsAnalyzerAsan4
+      - IntegrationTestsAnalyzerAsan5
       - IntegrationTestsRelease0
       - IntegrationTestsRelease1
       - IntegrationTestsRelease2
.github/workflows/woboq.yml (vendored, 1 line changed)
@@ -12,6 +12,7 @@ jobs:
   # don't use dockerhub push because this image updates so rarely
   WoboqCodebrowser:
     runs-on: [self-hosted, style-checker]
+    timeout-minutes: 420 # the task is pretty heavy, so there's an additional hour
     steps:
       - name: Set envs
         run: |
.gitignore (vendored, 3 lines changed)
@@ -69,6 +69,7 @@ cmake-build-*
 *.pyc
 __pycache__
 *.pytest_cache
+.mypy_cache
 
 test.cpp
 CPackConfig.cmake
@@ -161,8 +162,10 @@ tests/queries/0_stateless/test_*
 tests/queries/0_stateless/*.binary
 tests/queries/0_stateless/*.generated-expect
 tests/queries/0_stateless/*.expect.history
+tests/integration/**/_gen
 
 # rust
 /rust/**/target
 # It is autogenerated from *.in
 /rust/**/.cargo/config.toml
+/rust/**/vendor
.gitmodules (vendored, 6 lines changed)
@@ -258,9 +258,6 @@
 [submodule "contrib/wyhash"]
 	path = contrib/wyhash
 	url = https://github.com/wangyi-fudan/wyhash
-[submodule "contrib/hashidsxx"]
-	path = contrib/hashidsxx
-	url = https://github.com/schoentoon/hashidsxx
 [submodule "contrib/nats-io"]
 	path = contrib/nats-io
 	url = https://github.com/ClickHouse/nats.c
@@ -343,3 +340,6 @@
 [submodule "contrib/c-ares"]
 	path = contrib/c-ares
 	url = https://github.com/c-ares/c-ares.git
+[submodule "contrib/incbin"]
+	path = contrib/incbin
+	url = https://github.com/graphitemaster/incbin.git
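Entries like the new `contrib/incbin` block are what `git submodule add` writes into `.gitmodules`; a sketch of the commands that would produce and then fetch such an entry (run from the repository root):

```bash
# Records the path/url pair in .gitmodules, matching the entry added above.
git submodule add https://github.com/graphitemaster/incbin.git contrib/incbin

# After cloning the repository, fetch that submodule (and any nested ones):
git submodule update --init --recursive contrib/incbin
```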
CHANGELOG.md (101 lines changed)
@@ -1,4 +1,5 @@
 ### Table of Contents
+**[ClickHouse release v23.6, 2023-06-30](#236)**<br/>
 **[ClickHouse release v23.5, 2023-06-08](#235)**<br/>
 **[ClickHouse release v23.4, 2023-04-26](#234)**<br/>
 **[ClickHouse release v23.3 LTS, 2023-03-30](#233)**<br/>
@@ -8,6 +9,106 @@
 
 # 2023 Changelog
 
+### <a id="236"></a> ClickHouse release 23.6, 2023-06-29
+
+#### Backward Incompatible Change
+* Delete feature `do_not_evict_index_and_mark_files` in the fs cache. This feature was only making things worse. [#51253](https://github.com/ClickHouse/ClickHouse/pull/51253) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Remove ALTER support for experimental LIVE VIEW. [#51287](https://github.com/ClickHouse/ClickHouse/pull/51287) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Decrease the default values for `http_max_field_value_size` and `http_max_field_name_size` to 128 KiB. [#51163](https://github.com/ClickHouse/ClickHouse/pull/51163) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* CGroups metrics related to CPU are replaced with one metric, `CGroupMaxCPU` for better usability. The `Normalized` CPU usage metrics will be normalized to CGroups limits instead of the total number of CPUs when they are set. This closes [#50836](https://github.com/ClickHouse/ClickHouse/issues/50836). [#50835](https://github.com/ClickHouse/ClickHouse/pull/50835) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### New Feature
+* The function `transform` as well as `CASE` with value matching started to support all data types. This closes [#29730](https://github.com/ClickHouse/ClickHouse/issues/29730). This closes [#32387](https://github.com/ClickHouse/ClickHouse/issues/32387). This closes [#50827](https://github.com/ClickHouse/ClickHouse/issues/50827). This closes [#31336](https://github.com/ClickHouse/ClickHouse/issues/31336). This closes [#40493](https://github.com/ClickHouse/ClickHouse/issues/40493). [#51351](https://github.com/ClickHouse/ClickHouse/pull/51351) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Added option `--rename_files_after_processing <pattern>`. This closes [#34207](https://github.com/ClickHouse/ClickHouse/issues/34207). [#49626](https://github.com/ClickHouse/ClickHouse/pull/49626) ([alekseygolub](https://github.com/alekseygolub)).
+* Add support for `TRUNCATE` modifier in `INTO OUTFILE` clause. Suggest using `APPEND` or `TRUNCATE` for `INTO OUTFILE` when file exists. [#50950](https://github.com/ClickHouse/ClickHouse/pull/50950) ([alekar](https://github.com/alekar)).
+* Add table engine `Redis` and table function `redis`. It allows querying external Redis servers. [#50150](https://github.com/ClickHouse/ClickHouse/pull/50150) ([JackyWoo](https://github.com/JackyWoo)).
+* Allow to skip empty files in file/s3/url/hdfs table functions using settings `s3_skip_empty_files`, `hdfs_skip_empty_files`, `engine_file_skip_empty_files`, `engine_url_skip_empty_files`. [#50364](https://github.com/ClickHouse/ClickHouse/pull/50364) ([Kruglov Pavel](https://github.com/Avogar)).
+* Add a new setting named `use_mysql_types_in_show_columns` to alter the `SHOW COLUMNS` SQL statement to display MySQL equivalent types when a client is connected via the MySQL compatibility port. [#49577](https://github.com/ClickHouse/ClickHouse/pull/49577) ([Thomas Panetti](https://github.com/tpanetti)).
+* Clickhouse-client can now be called with a connection string instead of "--host", "--port", "--user" etc. [#50689](https://github.com/ClickHouse/ClickHouse/pull/50689) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
+* Add setting `session_timezone`; it is used as the default timezone for a session when not explicitly specified. [#44149](https://github.com/ClickHouse/ClickHouse/pull/44149) ([Andrey Zvonov](https://github.com/zvonand)).
+* Codec DEFLATE_QPL is now controlled via server setting "enable_deflate_qpl_codec" (default: false) instead of setting "allow_experimental_codecs". This marks DEFLATE_QPL non-experimental. [#50775](https://github.com/ClickHouse/ClickHouse/pull/50775) ([Robert Schulze](https://github.com/rschu1ze)).
+
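One of the new-feature entries above is easiest to see from the command line: the connection-string form of clickhouse-client. A short sketch (host, port, user, and database are placeholders, assuming a server listening on the default native port):

```bash
# New in 23.6: a single connection string instead of separate flags.
clickhouse-client "clickhouse://default@localhost:9000/default" --query "SELECT 1"

# Equivalent older form:
clickhouse-client --host localhost --port 9000 --user default --query "SELECT 1"
```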
+#### Performance Improvement
+* Improved scheduling of merge selecting and cleanup tasks in `ReplicatedMergeTree`. The tasks will not be executed too frequently when there's nothing to merge or cleanup. Added settings `max_merge_selecting_sleep_ms`, `merge_selecting_sleep_slowdown_factor`, `max_cleanup_delay_period` and `cleanup_thread_preferred_points_per_iteration`. It should close [#31919](https://github.com/ClickHouse/ClickHouse/issues/31919). [#50107](https://github.com/ClickHouse/ClickHouse/pull/50107) ([Alexander Tokmakov](https://github.com/tavplubix)).
+* Make filter push down through cross join. [#50605](https://github.com/ClickHouse/ClickHouse/pull/50605) ([Han Fei](https://github.com/hanfei1991)).
+* Improve performance with enabled QueryProfiler using thread-local timer_id instead of global object. [#48778](https://github.com/ClickHouse/ClickHouse/pull/48778) ([Jiebin Sun](https://github.com/jiebinn)).
+* Rewrite CapnProto input/output format to improve its performance. Map column names and CapnProto fields case insensitive, fix reading/writing of nested structure fields. [#49752](https://github.com/ClickHouse/ClickHouse/pull/49752) ([Kruglov Pavel](https://github.com/Avogar)).
+* Optimize parquet write performance for parallel threads. [#50102](https://github.com/ClickHouse/ClickHouse/pull/50102) ([Hongbin Ma](https://github.com/binmahone)).
+* Disable `parallelize_output_from_storages` for processing MATERIALIZED VIEWs and storages with one block only. [#50214](https://github.com/ClickHouse/ClickHouse/pull/50214) ([Azat Khuzhin](https://github.com/azat)).
+* Merge PR [#46558](https://github.com/ClickHouse/ClickHouse/pull/46558). Avoid block permutation during sort if the block is already sorted. [#50697](https://github.com/ClickHouse/ClickHouse/pull/50697) ([Alexey Milovidov](https://github.com/alexey-milovidov), [Maksim Kita](https://github.com/kitaisreal)).
+* Make multiple list requests to ZooKeeper in parallel to speed up reading from system.zookeeper table. [#51042](https://github.com/ClickHouse/ClickHouse/pull/51042) ([Alexander Gololobov](https://github.com/davenger)).
+* Speedup initialization of DateTime lookup tables for time zones. This should reduce startup/connect time of clickhouse-client especially in debug build as it is rather heavy. [#51347](https://github.com/ClickHouse/ClickHouse/pull/51347) ([Alexander Gololobov](https://github.com/davenger)).
+* Fix data lakes slowness because of synchronous head requests. (Related to Iceberg/Deltalake/Hudi being slow with a lot of files). [#50976](https://github.com/ClickHouse/ClickHouse/pull/50976) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Do not read all the columns from right GLOBAL JOIN table. [#50721](https://github.com/ClickHouse/ClickHouse/pull/50721) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+
+#### Experimental Feature
+* Support parallel replicas with the analyzer. [#50441](https://github.com/ClickHouse/ClickHouse/pull/50441) ([Raúl Marín](https://github.com/Algunenano)).
+* Add random sleep before large merges/mutations execution to split load more evenly between replicas in case of zero-copy replication. [#51282](https://github.com/ClickHouse/ClickHouse/pull/51282) ([alesapin](https://github.com/alesapin)).
+* Do not replicate `ALTER PARTITION` queries and mutations through `Replicated` database if it has only one shard and the underlying table is `ReplicatedMergeTree`. [#51049](https://github.com/ClickHouse/ClickHouse/pull/51049) ([Alexander Tokmakov](https://github.com/tavplubix)).
+
+#### Improvement
+* Relax the thresholds for "too many parts" to be more modern. Return the backpressure during long-running insert queries. [#50856](https://github.com/ClickHouse/ClickHouse/pull/50856) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Allow to cast IPv6 to IPv4 address for CIDR ::ffff:0:0/96 (IPv4-mapped addresses). [#49759](https://github.com/ClickHouse/ClickHouse/pull/49759) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
+* Update MongoDB protocol to support MongoDB 5.1 version and newer. Support for the versions with the old protocol (<3.6) is preserved. Closes [#45621](https://github.com/ClickHouse/ClickHouse/issues/45621), [#49879](https://github.com/ClickHouse/ClickHouse/issues/49879). [#50061](https://github.com/ClickHouse/ClickHouse/pull/50061) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Add setting `input_format_max_bytes_to_read_for_schema_inference` to limit the number of bytes to read in schema inference. Closes [#50577](https://github.com/ClickHouse/ClickHouse/issues/50577). [#50592](https://github.com/ClickHouse/ClickHouse/pull/50592) ([Kruglov Pavel](https://github.com/Avogar)).
+* Respect setting `input_format_null_as_default` in schema inference. [#50602](https://github.com/ClickHouse/ClickHouse/pull/50602) ([Kruglov Pavel](https://github.com/Avogar)).
+* Allow to skip trailing empty lines in CSV/TSV/CustomSeparated formats via settings `input_format_csv_skip_trailing_empty_lines`, `input_format_tsv_skip_trailing_empty_lines` and `input_format_custom_skip_trailing_empty_lines` (disabled by default). Closes [#49315](https://github.com/ClickHouse/ClickHouse/issues/49315). [#50635](https://github.com/ClickHouse/ClickHouse/pull/50635) ([Kruglov Pavel](https://github.com/Avogar)).
+* Functions "toDateOrDefault|OrNull" and "accuateCast[OrDefault|OrNull]" now correctly parse numeric arguments. [#50709](https://github.com/ClickHouse/ClickHouse/pull/50709) ([Dmitry Kardymon](https://github.com/kardymonds)).
+* Support CSV with whitespace or `\t` field delimiters, and these delimiters are supported in Spark. [#50712](https://github.com/ClickHouse/ClickHouse/pull/50712) ([KevinyhZou](https://github.com/KevinyhZou)).
+* Settings `number_of_mutations_to_delay` and `number_of_mutations_to_throw` are enabled by default now with values 500 and 1000 respectively. [#50726](https://github.com/ClickHouse/ClickHouse/pull/50726) ([Anton Popov](https://github.com/CurtizJ)).
+* The dashboard correctly shows missing values. This closes [#50831](https://github.com/ClickHouse/ClickHouse/issues/50831). [#50832](https://github.com/ClickHouse/ClickHouse/pull/50832) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Added the possibility to use date and time arguments in the syslog timestamp format in functions `parseDateTimeBestEffort*` and `parseDateTime64BestEffort*`. [#50925](https://github.com/ClickHouse/ClickHouse/pull/50925) ([Victor Krasnov](https://github.com/sirvickr)).
+* Command line parameter "--password" in clickhouse-client can now be specified only once. [#50966](https://github.com/ClickHouse/ClickHouse/pull/50966) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
+* Use `hash_of_all_files` from `system.parts` to check identity of parts during on-cluster backups. [#50997](https://github.com/ClickHouse/ClickHouse/pull/50997) ([Vitaly Baranov](https://github.com/vitlibar)).
+* The system table zookeeper_connection connected_time identifies the time when the connection is established (standard format), and session_uptime_elapsed_seconds is added, which labels the duration of the established connection session (in seconds). [#51026](https://github.com/ClickHouse/ClickHouse/pull/51026) ([郭小龙](https://github.com/guoxiaolongzte)).
+* Improve the progress bar for file/s3/hdfs/url table functions by using chunk size from source data and using incremental total size counting in each thread. Fix the progress bar for *Cluster functions. This closes [#47250](https://github.com/ClickHouse/ClickHouse/issues/47250). [#51088](https://github.com/ClickHouse/ClickHouse/pull/51088) ([Kruglov Pavel](https://github.com/Avogar)).
+* Add total_bytes_to_read to the Progress packet in TCP protocol for better Progress bar. [#51158](https://github.com/ClickHouse/ClickHouse/pull/51158) ([Kruglov Pavel](https://github.com/Avogar)).
+* Better checking of data parts on disks with filesystem cache. [#51164](https://github.com/ClickHouse/ClickHouse/pull/51164) ([Anton Popov](https://github.com/CurtizJ)).
+* Fix sometimes not correct current_elements_num in fs cache. [#51242](https://github.com/ClickHouse/ClickHouse/pull/51242) ([Kseniia Sumarokova](https://github.com/kssenii)).
+
+#### Build/Testing/Packaging Improvement
+* Add embedded keeper-client to standalone keeper binary. [#50964](https://github.com/ClickHouse/ClickHouse/pull/50964) ([pufit](https://github.com/pufit)).
+* Actual LZ4 version is used now. [#50621](https://github.com/ClickHouse/ClickHouse/pull/50621) ([Nikita Taranov](https://github.com/nickitat)).
+* ClickHouse server will print the list of changed settings on fatal errors. This closes [#51137](https://github.com/ClickHouse/ClickHouse/issues/51137). [#51138](https://github.com/ClickHouse/ClickHouse/pull/51138) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Allow building ClickHouse with clang-17. [#51300](https://github.com/ClickHouse/ClickHouse/pull/51300) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* [SQLancer](https://github.com/sqlancer/sqlancer) check is considered stable as bugs that were triggered by it are fixed. Now failures of SQLancer check will be reported as failed check status. [#51340](https://github.com/ClickHouse/ClickHouse/pull/51340) ([Ilya Yatsishin](https://github.com/qoega)).
+* Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+
+#### Bug Fix (user-visible misbehavior in an official stable release)
+
+* Report loading status for executable dictionaries correctly [#48775](https://github.com/ClickHouse/ClickHouse/pull/48775) ([Anton Kozlov](https://github.com/tonickkozlov)).
+* Proper mutation of skip indices and projections [#50104](https://github.com/ClickHouse/ClickHouse/pull/50104) ([Amos Bird](https://github.com/amosbird)).
+* Cleanup moving parts [#50489](https://github.com/ClickHouse/ClickHouse/pull/50489) ([vdimir](https://github.com/vdimir)).
+* Fix backward compatibility for IP types hashing in aggregate functions [#50551](https://github.com/ClickHouse/ClickHouse/pull/50551) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
+* Fix Log family table return wrong rows count after truncate [#50585](https://github.com/ClickHouse/ClickHouse/pull/50585) ([flynn](https://github.com/ucasfl)).
+* Fix bug in `uniqExact` parallel merging [#50590](https://github.com/ClickHouse/ClickHouse/pull/50590) ([Nikita Taranov](https://github.com/nickitat)).
+* Revert recent grace hash join changes [#50699](https://github.com/ClickHouse/ClickHouse/pull/50699) ([vdimir](https://github.com/vdimir)).
+* Query Cache: Try to fix bad cast from `ColumnConst` to `ColumnVector<char8_t>` [#50704](https://github.com/ClickHouse/ClickHouse/pull/50704) ([Robert Schulze](https://github.com/rschu1ze)).
+* Avoid storing logs in Keeper containing unknown operation [#50751](https://github.com/ClickHouse/ClickHouse/pull/50751) ([Antonio Andelic](https://github.com/antonio2368)).
+* SummingMergeTree support for DateTime64 [#50797](https://github.com/ClickHouse/ClickHouse/pull/50797) ([Jordi Villar](https://github.com/jrdi)).
+* Add compatibility setting for non-const timezones [#50834](https://github.com/ClickHouse/ClickHouse/pull/50834) ([Robert Schulze](https://github.com/rschu1ze)).
+* Fix hashing of LDAP params in the cache entries [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
+* Fallback to parsing big integer from String instead of exception in Parquet format [#50873](https://github.com/ClickHouse/ClickHouse/pull/50873) ([Kruglov Pavel](https://github.com/Avogar)).
+* Fix checking the lock file too often while writing a backup [#50889](https://github.com/ClickHouse/ClickHouse/pull/50889) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Do not apply projection if read-in-order was enabled. [#50923](https://github.com/ClickHouse/ClickHouse/pull/50923) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix race in the Azure blob storage iterator [#50936](https://github.com/ClickHouse/ClickHouse/pull/50936) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Fix erroneous `sort_description` propagation in `CreatingSets` [#50955](https://github.com/ClickHouse/ClickHouse/pull/50955) ([Nikita Taranov](https://github.com/nickitat)).
+* Fix Iceberg v2 optional metadata parsing [#50974](https://github.com/ClickHouse/ClickHouse/pull/50974) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* MaterializedMySQL: Keep parentheses for empty table overrides [#50977](https://github.com/ClickHouse/ClickHouse/pull/50977) ([Val Doroshchuk](https://github.com/valbok)).
+* Fix crash in BackupCoordinationStageSync::setError() [#51012](https://github.com/ClickHouse/ClickHouse/pull/51012) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix subtly broken copy-on-write of ColumnLowCardinality dictionary [#51064](https://github.com/ClickHouse/ClickHouse/pull/51064) ([Michael Kolupaev](https://github.com/al13n321)).
+* Generate safe IVs [#51086](https://github.com/ClickHouse/ClickHouse/pull/51086) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
+* Fix ineffective query cache for SELECTs with subqueries [#51132](https://github.com/ClickHouse/ClickHouse/pull/51132) ([Robert Schulze](https://github.com/rschu1ze)).
+* Fix Set index with constant nullable comparison. [#51205](https://github.com/ClickHouse/ClickHouse/pull/51205) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix a crash in s3 and s3Cluster functions [#51209](https://github.com/ClickHouse/ClickHouse/pull/51209) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Fix a crash with compiled expressions [#51231](https://github.com/ClickHouse/ClickHouse/pull/51231) ([LiuNeng](https://github.com/liuneng1994)).
+* Fix use-after-free in StorageURL when switching URLs [#51260](https://github.com/ClickHouse/ClickHouse/pull/51260) ([Michael Kolupaev](https://github.com/al13n321)).
+* Updated check for parameterized view [#51272](https://github.com/ClickHouse/ClickHouse/pull/51272) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Fix multiple writing of same file to backup [#51299](https://github.com/ClickHouse/ClickHouse/pull/51299) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Remove garbage from function `transform` [#51350](https://github.com/ClickHouse/ClickHouse/pull/51350) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+
 ### <a id="235"></a> ClickHouse release 23.5, 2023-06-08
 
 #### Upgrade Notes
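As a usage note for the `session_timezone` setting introduced in 23.6 above, a minimal probe mirroring the documented usage, assuming a running local server (`timeZone()` reports the session's effective timezone, `serverTimeZone()` the server's own):

```bash
clickhouse-client --query "
    SELECT timeZone(), serverTimeZone()
    SETTINGS session_timezone = 'America/Denver'
"
```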
CMakeLists.txt

@@ -87,7 +87,6 @@ if (ENABLE_FUZZING)
     set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
     set (ENABLE_LIBRARIES 0)
     set (ENABLE_SSL 1)
-    set (USE_UNWIND ON)
     set (ENABLE_EMBEDDED_COMPILER 0)
     set (ENABLE_EXAMPLES 0)
     set (ENABLE_UTILS 0)
@@ -344,9 +343,9 @@ if (COMPILER_CLANG)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")
 
-    if (NOT ENABLE_TESTS AND NOT SANITIZE)
+    if (NOT ENABLE_TESTS AND NOT SANITIZE AND OS_LINUX)
         # https://clang.llvm.org/docs/ThinLTO.html
-        # Applies to clang only.
+        # Applies to clang and linux only.
         # Disabled when building with tests or sanitizers.
         option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
     endif()
README.md (23 lines changed)
@@ -16,28 +16,31 @@ curl https://clickhouse.com/ | sh
 * [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
 * [Slack](https://clickhouse.com/slack) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
 * [Blog](https://clickhouse.com/blog/) contains various ClickHouse-related articles, as well as announcements and reports about events.
-* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlight and navigation.
+* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlighting and navigation.
-* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlight, powered by github.dev.
+* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlighting, powered by github.dev.
+* [Static Analysis (SonarCloud)](https://sonarcloud.io/project/issues?resolved=false&id=ClickHouse_ClickHouse) proposes C++ quality improvements.
 * [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.
 
 ## Upcoming Events
 
-* [**v23.5 Release Webinar**](https://clickhouse.com/company/events/v23-5-release-webinar?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-05) - Jun 8 - 23.5 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
+* [**v23.7 Release Webinar**](https://clickhouse.com/company/events/v23-7-community-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-07) - Jul 27 - 23.7 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
-* [**ClickHouse Meetup in Bangalore**](https://www.meetup.com/clickhouse-bangalore-user-group/events/293740066/) - Jun 7
+* [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/293913596) - Jul 18
-* [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/293426725/) - Jun 7
+* [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/293913441) - Jul 19
+* [**ClickHouse Meetup in Toronto**](https://www.meetup.com/clickhouse-toronto-user-group/events/294183127) - Jul 20
+* [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/294428050/) - Jul 27
+* [**ClickHouse Meetup in Paris**](https://www.meetup.com/clickhouse-france-user-group/events/294283460) - Sep 12
 
-Also, keep an eye out for upcoming meetups in Amsterdam, Boston, NYC, Beijing, and Toronto. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
+Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
 
 ## Recent Recordings
 * **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
-* **Recording available**: [**v23.4 Release Webinar**](https://www.youtube.com/watch?v=4rrf6bk_mOg) Faster Parquet Reading, Asynchonous Connections to Reoplicas, Trailing Comma before FROM, extractKeyValuePairs, integrations updates, and so much more! Watch it now!
+* **Recording available**: [**v23.6 Release Webinar**](https://www.youtube.com/watch?v=cuf_hYn7dqU) All the features of 23.6, one convenient video! Watch it now!
 * **All release webinar recordings**: [YouTube playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3jAlSy1JxyP8zluvXaN3nxU)
 
 
-## Interested in joining ClickHouse and making it your full time job?
+## Interested in joining ClickHouse and making it your full-time job?
 
-We are a globally diverse and distributed team, united behind a common goal of creating industry-leading, real-time analytics. Here, you will have an opportunity to solve some of the most cutting edge technical challenges and have direct ownership of your work and vision. If you are a contributor by nature, a thinker as well as a doer - we’ll definitely click!
+We are a globally diverse and distributed team, united behind a common goal of creating industry-leading, real-time analytics. Here, you will have an opportunity to solve some of the most cutting-edge technical challenges and have direct ownership of your work and vision. If you are a contributor by nature, a thinker and a doer - we’ll definitely click!
 
 Check out our **current openings** here: https://clickhouse.com/company/careers
 
|
@ -13,6 +13,7 @@ The following versions of ClickHouse server are currently being supported with s

 | Version | Supported |
 |:-|:-|
+| 23.6 | ✔️ |
 | 23.5 | ✔️ |
 | 23.4 | ✔️ |
 | 23.3 | ✔️ |
@ -2,21 +2,23 @@

 #include <base/strong_typedef.h>
 #include <base/extended_types.h>
+#include <Common/formatIPv6.h>
 #include <Common/memcmpSmall.h>

 namespace DB
 {

-using IPv4 = StrongTypedef<UInt32, struct IPv4Tag>;
+struct IPv4 : StrongTypedef<UInt32, struct IPv4Tag>
+{
+    using StrongTypedef::StrongTypedef;
+    using StrongTypedef::operator=;
+    constexpr explicit IPv4(UInt64 value): StrongTypedef(static_cast<UnderlyingType>(value)) {}
+};

 struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
 {
-    constexpr IPv6() = default;
-    constexpr explicit IPv6(const UInt128 & x) : StrongTypedef(x) {}
-    constexpr explicit IPv6(UInt128 && x) : StrongTypedef(std::move(x)) {}
-
-    IPv6 & operator=(const UInt128 & rhs) { StrongTypedef::operator=(rhs); return *this; }
-    IPv6 & operator=(UInt128 && rhs) { StrongTypedef::operator=(std::move(rhs)); return *this; }
+    using StrongTypedef::StrongTypedef;
+    using StrongTypedef::operator=;

     bool operator<(const IPv6 & rhs) const
     {
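A quick sketch of the inheriting-constructor pattern the hunk above switches to (illustrative only, with a minimal stand-in for base/strong_typedef.h; the real StrongTypedef has more machinery):

#include <cstdint>

// Minimal stand-in for StrongTypedef, just to show the pattern.
template <typename T, typename Tag>
struct StrongTypedef
{
    using UnderlyingType = T;
    constexpr StrongTypedef() = default;
    constexpr explicit StrongTypedef(const T & v) : value(v) {}
    StrongTypedef & operator=(const T & v) { value = v; return *this; }
    constexpr const T & toUnderType() const { return value; }
    T value{};
};

struct IPv4 : StrongTypedef<uint32_t, struct IPv4Tag>
{
    using StrongTypedef::StrongTypedef;
    using StrongTypedef::operator=;
    // The extra overload from the diff: accept a UInt64 and narrow explicitly.
    constexpr explicit IPv4(uint64_t v) : StrongTypedef(static_cast<UnderlyingType>(v)) {}
};

int main()
{
    IPv4 addr(uint64_t{0x7F000001});   // 127.0.0.1, accepted via the UInt64 overload
    return addr.toUnderType() == 0x7F000001 ? 0 : 1;
}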
@ -54,12 +56,22 @@ namespace DB

 namespace std
 {
+    /// For historical reasons we hash IPv6 as a FixedString(16)
     template <>
     struct hash<DB::IPv6>
     {
         size_t operator()(const DB::IPv6 & x) const
         {
-            return std::hash<DB::IPv6::UnderlyingType>()(x.toUnderType());
+            return std::hash<std::string_view>{}(std::string_view(reinterpret_cast<const char*>(&x.toUnderType()), IPV6_BINARY_LENGTH));
+        }
+    };
+
+    template <>
+    struct hash<DB::IPv4>
+    {
+        size_t operator()(const DB::IPv4 & x) const
+        {
+            return std::hash<DB::IPv4::UnderlyingType>()(x.toUnderType());
         }
     };
 }
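To see the byte-wise hashing idea in isolation (a self-contained sketch with a stand-in type; assumes IPV6_BINARY_LENGTH == 16 as suggested by the new include of <Common/formatIPv6.h>):

#include <cstddef>
#include <functional>
#include <string_view>

// Stand-in: 16 raw bytes, like an IPv6 address.
struct FakeIPv6 { unsigned char bytes[16]; };

// Hash the raw 16-byte representation, the way a FixedString(16) column is
// hashed, instead of hashing the numeric UInt128 value.
std::size_t hashAsFixedString16(const FakeIPv6 & x)
{
    return std::hash<std::string_view>{}(
        std::string_view(reinterpret_cast<const char *>(x.bytes), sizeof(x.bytes)));
}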
@ -7,7 +7,13 @@

 /** Returns value `from` converted to type `To` while retaining bit representation.
   * `To` and `From` must satisfy `CopyConstructible`.
+  *
   * In contrast to std::bit_cast can cast types of different width.
+  *
+  * Note: for signed types of narrower size, the casted result is zero-extended
+  * instead of sign-extended as with regular static_cast.
+  * For example, -1 Int8 (represented as 0xFF) bit_casted to UInt64
+  * gives 255 (represented as 0x00000000000000FF) instead of 0xFFFFFFFFFFFFFFFF
   */
 template <typename To, typename From>
 std::decay_t<To> bit_cast(const From & from)
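A sketch of the zero-extension behavior the new comment describes (assumes a memcpy-based bit_cast that value-initializes the destination first, which is what zero-fills the high bytes):

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstring>

template <typename To, typename From>
To bit_cast_sketch(const From & from)
{
    To to{};   // all bits zero before the copy, so no sign extension can occur
    std::memcpy(&to, &from, std::min(sizeof(To), sizeof(From)));
    return to;
}

int main()
{
    std::int8_t minus_one = -1;   // stored as 0xFF
    // On little-endian targets this yields 255, not 0xFFFFFFFFFFFFFFFF.
    assert(bit_cast_sketch<std::uint64_t>(minus_one) == 255);
}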
@ -2,6 +2,7 @@

 #include <cstdint>
 #include <string>
+#include <array>

 #if defined(__SSE2__)
 #include <emmintrin.h>
@ -447,7 +448,7 @@ inline char * find_last_not_symbols_or_null(char * begin, char * end)

 /// See https://github.com/boostorg/algorithm/issues/63
 /// And https://bugs.llvm.org/show_bug.cgi?id=41141
 template <char... symbols, typename To>
-inline void splitInto(To & to, const std::string & what, bool token_compress = false)
+inline To & splitInto(To & to, std::string_view what, bool token_compress = false)
 {
     const char * pos = what.data();
     const char * end = pos + what.size();
@ -463,4 +464,6 @@ inline void splitInto(To & to, const std::string & what, bool token_compress = f
         else
             pos = delimiter_or_end;
     }
+
+    return to;
 }
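One practical effect of returning `To &` is that the filled container can be consumed in the same expression. A simplified, self-contained analog (the real splitInto takes its delimiters as a template pack and supports token compression):

#include <string>
#include <string_view>
#include <vector>

// Split on a single delimiter and hand the container reference back.
std::vector<std::string> & splitIntoSketch(std::vector<std::string> & to, std::string_view what, char delim)
{
    size_t start = 0;
    while (start <= what.size())
    {
        size_t end = what.find(delim, start);
        if (end == std::string_view::npos)
            end = what.size();
        to.emplace_back(what.substr(start, end - start));
        start = end + 1;
    }
    return to;
}

// Usage: iterate the result without a separate statement.
// std::vector<std::string> parts;
// for (const auto & p : splitIntoSketch(parts, "a,b,c", ',')) { /* ... */ }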
@ -15,25 +15,34 @@

 static thread_local uint64_t current_tid = 0;

+static void setCurrentThreadId()
+{
+#if defined(OS_ANDROID)
+    current_tid = gettid();
+#elif defined(OS_LINUX)
+    current_tid = static_cast<uint64_t>(syscall(SYS_gettid)); /// This call is always successful. - man gettid
+#elif defined(OS_FREEBSD)
+    current_tid = pthread_getthreadid_np();
+#elif defined(OS_SUNOS)
+    // On Solaris-derived systems, this returns the ID of the LWP, analogous
+    // to a thread.
+    current_tid = static_cast<uint64_t>(pthread_self());
+#else
+    if (0 != pthread_threadid_np(nullptr, &current_tid))
+        throw std::logic_error("pthread_threadid_np returned error");
+#endif
+}
+
 uint64_t getThreadId()
 {
     if (!current_tid)
-    {
-#if defined(OS_ANDROID)
-        current_tid = gettid();
-#elif defined(OS_LINUX)
-        current_tid = static_cast<uint64_t>(syscall(SYS_gettid)); /// This call is always successful. - man gettid
-#elif defined(OS_FREEBSD)
-        current_tid = pthread_getthreadid_np();
-#elif defined(OS_SUNOS)
-        // On Solaris-derived systems, this returns the ID of the LWP, analogous
-        // to a thread.
-        current_tid = static_cast<uint64_t>(pthread_self());
-#else
-        if (0 != pthread_threadid_np(nullptr, &current_tid))
-            throw std::logic_error("pthread_threadid_np returned error");
-#endif
-    }
+        setCurrentThreadId();

     return current_tid;
 }
+
+void updateCurrentThreadIdAfterFork()
+{
+    setCurrentThreadId();
+}
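The new updateCurrentThreadIdAfterFork() exists because the cached id goes stale in a forked child. A sketch of how a caller might wire it up (the registration site is an assumption, not shown in this diff):

#include <cstdint>
#include <pthread.h>

// Assumed declarations from base/getThreadId.h as diffed below.
uint64_t getThreadId();
void updateCurrentThreadIdAfterFork();

void installForkHandler()
{
    // Re-read the OS thread id in the child; otherwise getThreadId()
    // would keep returning the parent's cached value after fork().
    pthread_atfork(nullptr, nullptr, updateCurrentThreadIdAfterFork);
}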
@ -3,3 +3,5 @@

 /// Obtain thread id from OS. The value is cached in thread local variable.
 uint64_t getThreadId();
+
+void updateCurrentThreadIdAfterFork();
426
base/base/hex.h
@ -4,212 +4,288 @@
 #include <cstring>
 #include "types.h"

-/// Maps 0..15 to 0..9A..F or 0..9a..f correspondingly.
-constexpr inline std::string_view hex_digit_to_char_uppercase_table = "0123456789ABCDEF";
-constexpr inline std::string_view hex_digit_to_char_lowercase_table = "0123456789abcdef";
+namespace CityHash_v1_0_2 { struct uint128; }
+
+namespace wide
+{
+    template <size_t Bits, typename Signed>
+    class integer;
+}
+
+namespace impl
+{
+    /// Maps 0..15 to 0..9A..F or 0..9a..f correspondingly.
+    constexpr inline std::string_view hex_digit_to_char_uppercase_table = "0123456789ABCDEF";
+    constexpr inline std::string_view hex_digit_to_char_lowercase_table = "0123456789abcdef";
+
+    /// Maps 0..255 to 00..FF or 00..ff correspondingly.
+    constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
+        "000102030405060708090A0B0C0D0E0F"
+        "101112131415161718191A1B1C1D1E1F"
+        "202122232425262728292A2B2C2D2E2F"
+        "303132333435363738393A3B3C3D3E3F"
+        "404142434445464748494A4B4C4D4E4F"
+        "505152535455565758595A5B5C5D5E5F"
+        "606162636465666768696A6B6C6D6E6F"
+        "707172737475767778797A7B7C7D7E7F"
+        "808182838485868788898A8B8C8D8E8F"
+        "909192939495969798999A9B9C9D9E9F"
+        "A0A1A2A3A4A5A6A7A8A9AAABACADAEAF"
+        "B0B1B2B3B4B5B6B7B8B9BABBBCBDBEBF"
+        "C0C1C2C3C4C5C6C7C8C9CACBCCCDCECF"
+        "D0D1D2D3D4D5D6D7D8D9DADBDCDDDEDF"
+        "E0E1E2E3E4E5E6E7E8E9EAEBECEDEEEF"
+        "F0F1F2F3F4F5F6F7F8F9FAFBFCFDFEFF";
+
+    constexpr inline std::string_view hex_byte_to_char_lowercase_table = //
+        "000102030405060708090a0b0c0d0e0f"
+        "101112131415161718191a1b1c1d1e1f"
+        "202122232425262728292a2b2c2d2e2f"
+        "303132333435363738393a3b3c3d3e3f"
+        "404142434445464748494a4b4c4d4e4f"
+        "505152535455565758595a5b5c5d5e5f"
+        "606162636465666768696a6b6c6d6e6f"
+        "707172737475767778797a7b7c7d7e7f"
+        "808182838485868788898a8b8c8d8e8f"
+        "909192939495969798999a9b9c9d9e9f"
+        "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf"
+        "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf"
+        "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf"
+        "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf"
+        "e0e1e2e3e4e5e6e7e8e9eaebecedeeef"
+        "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";
+
+    /// Maps 0..255 to 00000000..11111111 correspondingly.
+    constexpr inline std::string_view bin_byte_to_char_table = //
+        "0000000000000001000000100000001100000100000001010000011000000111"
+        "0000100000001001000010100000101100001100000011010000111000001111"
+        "0001000000010001000100100001001100010100000101010001011000010111"
+        "0001100000011001000110100001101100011100000111010001111000011111"
+        "0010000000100001001000100010001100100100001001010010011000100111"
+        "0010100000101001001010100010101100101100001011010010111000101111"
+        "0011000000110001001100100011001100110100001101010011011000110111"
+        "0011100000111001001110100011101100111100001111010011111000111111"
+        "0100000001000001010000100100001101000100010001010100011001000111"
+        "0100100001001001010010100100101101001100010011010100111001001111"
+        "0101000001010001010100100101001101010100010101010101011001010111"
+        "0101100001011001010110100101101101011100010111010101111001011111"
+        "0110000001100001011000100110001101100100011001010110011001100111"
+        "0110100001101001011010100110101101101100011011010110111001101111"
+        "0111000001110001011100100111001101110100011101010111011001110111"
+        "0111100001111001011110100111101101111100011111010111111001111111"
+        "1000000010000001100000101000001110000100100001011000011010000111"
+        "1000100010001001100010101000101110001100100011011000111010001111"
+        "1001000010010001100100101001001110010100100101011001011010010111"
+        "1001100010011001100110101001101110011100100111011001111010011111"
+        "1010000010100001101000101010001110100100101001011010011010100111"
+        "1010100010101001101010101010101110101100101011011010111010101111"
+        "1011000010110001101100101011001110110100101101011011011010110111"
+        "1011100010111001101110101011101110111100101111011011111010111111"
+        "1100000011000001110000101100001111000100110001011100011011000111"
+        "1100100011001001110010101100101111001100110011011100111011001111"
+        "1101000011010001110100101101001111010100110101011101011011010111"
+        "1101100011011001110110101101101111011100110111011101111011011111"
+        "1110000011100001111000101110001111100100111001011110011011100111"
+        "1110100011101001111010101110101111101100111011011110111011101111"
+        "1111000011110001111100101111001111110100111101011111011011110111"
+        "1111100011111001111110101111101111111100111111011111111011111111";
+
+    /// Maps 0..9, A..F, a..f to 0..15. Other chars are mapped to implementation specific value.
+    constexpr inline std::string_view hex_char_to_digit_table
+        = {"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\xff\xff\xff\xff\xff\xff" //0-9
+           "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //A-Z
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //a-z
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
+           "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
+           256};
+
+    /// Converts a hex digit '0'..'f' or '0'..'F' to its value 0..15.
+    constexpr UInt8 unhexDigit(char c)
+    {
+        return hex_char_to_digit_table[static_cast<UInt8>(c)];
+    }
+
+    /// Converts an unsigned integer in the native endian to hexadecimal representation and back. Used as a base class for HexConversion<T>.
+    template <typename TUInt, typename = void>
+    struct HexConversionUInt
+    {
+        static const constexpr size_t num_hex_digits = sizeof(TUInt) * 2;
+
+        static void hex(TUInt uint_, char * out, std::string_view table)
+        {
+            union
+            {
+                TUInt value;
+                UInt8 uint8[sizeof(TUInt)];
+            };
+
+            value = uint_;
+
+            for (size_t i = 0; i < sizeof(TUInt); ++i)
+            {
+                if constexpr (std::endian::native == std::endian::little)
+                    memcpy(out + i * 2, &table[static_cast<size_t>(uint8[sizeof(TUInt) - 1 - i]) * 2], 2);
+                else
+                    memcpy(out + i * 2, &table[static_cast<size_t>(uint8[i]) * 2], 2);
+            }
+        }
+
+        static TUInt unhex(const char * data)
+        {
+            TUInt res;
+            if constexpr (sizeof(TUInt) == 1)
+            {
+                res = static_cast<UInt8>(unhexDigit(data[0])) * 0x10 + static_cast<UInt8>(unhexDigit(data[1]));
+            }
+            else if constexpr (sizeof(TUInt) == 2)
+            {
+                res = static_cast<UInt16>(unhexDigit(data[0])) * 0x1000 + static_cast<UInt16>(unhexDigit(data[1])) * 0x100
+                    + static_cast<UInt16>(unhexDigit(data[2])) * 0x10 + static_cast<UInt16>(unhexDigit(data[3]));
+            }
+            else if constexpr ((sizeof(TUInt) <= 8) || ((sizeof(TUInt) % 8) != 0))
+            {
+                res = 0;
+                for (size_t i = 0; i < sizeof(TUInt) * 2; ++i, ++data)
+                {
+                    res <<= 4;
+                    res += unhexDigit(*data);
+                }
+            }
+            else
+            {
+                res = 0;
+                for (size_t i = 0; i < sizeof(TUInt) / 8; ++i, data += 16)
+                {
+                    res <<= 64;
+                    res += HexConversionUInt<UInt64>::unhex(data);
+                }
+            }
+            return res;
+        }
+    };
+
+    /// Helper template class to convert a value of any supported type to hexadecimal representation and back.
+    template <typename T, typename SFINAE = void>
+    struct HexConversion;
+
+    template <typename TUInt>
+    struct HexConversion<TUInt, std::enable_if_t<std::is_integral_v<TUInt>>> : public HexConversionUInt<TUInt> {};
+
+    template <size_t Bits, typename Signed>
+    struct HexConversion<wide::integer<Bits, Signed>> : public HexConversionUInt<wide::integer<Bits, Signed>> {};
+
+    template <typename CityHashUInt128> /// Partial specialization here allows not to include <city.h> in this header.
+    struct HexConversion<CityHashUInt128, std::enable_if_t<std::is_same_v<CityHashUInt128, typename CityHash_v1_0_2::uint128>>>
+    {
+        static const constexpr size_t num_hex_digits = 32;
+
+        static void hex(const CityHashUInt128 & uint_, char * out, std::string_view table)
+        {
+            HexConversion<UInt64>::hex(uint_.high64, out, table);
+            HexConversion<UInt64>::hex(uint_.low64, out + 16, table);
+        }
+
+        static CityHashUInt128 unhex(const char * data)
+        {
+            CityHashUInt128 res;
+            res.high64 = HexConversion<UInt64>::unhex(data);
+            res.low64 = HexConversion<UInt64>::unhex(data + 16);
+            return res;
+        }
+    };
+}
+
+/// Produces a hexadecimal representation of an integer value with leading zeros (for checksums).
+/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
+/// It can be used with signed types as well, however they are written as corresponding unsigned numbers
+/// using two's complement (i.e. for example "-1" is written as "0xFF", not as "-0x01").
+template <typename T>
+void writeHexUIntUppercase(const T & value, char * out)
+{
+    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_uppercase_table);
+}
+
+template <typename T>
+void writeHexUIntLowercase(const T & value, char * out)
+{
+    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_lowercase_table);
+}
+
+template <typename T>
+std::string getHexUIntUppercase(const T & value)
+{
+    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
+    writeHexUIntUppercase(value, res.data());
+    return res;
+}
+
+template <typename T>
+std::string getHexUIntLowercase(const T & value)
+{
+    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
+    writeHexUIntLowercase(value, res.data());
+    return res;
+}
+
 constexpr char hexDigitUppercase(unsigned char c)
 {
-    return hex_digit_to_char_uppercase_table[c];
+    return impl::hex_digit_to_char_uppercase_table[c];
 }

 constexpr char hexDigitLowercase(unsigned char c)
 {
-    return hex_digit_to_char_lowercase_table[c];
+    return impl::hex_digit_to_char_lowercase_table[c];
 }

-/// Maps 0..255 to 00..FF or 00..ff correspondingly
-
-constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
-    "000102030405060708090A0B0C0D0E0F"
-    "101112131415161718191A1B1C1D1E1F"
-    "202122232425262728292A2B2C2D2E2F"
-    "303132333435363738393A3B3C3D3E3F"
-    "404142434445464748494A4B4C4D4E4F"
-    "505152535455565758595A5B5C5D5E5F"
-    "606162636465666768696A6B6C6D6E6F"
-    "707172737475767778797A7B7C7D7E7F"
-    "808182838485868788898A8B8C8D8E8F"
-    "909192939495969798999A9B9C9D9E9F"
-    "A0A1A2A3A4A5A6A7A8A9AAABACADAEAF"
-    "B0B1B2B3B4B5B6B7B8B9BABBBCBDBEBF"
-    "C0C1C2C3C4C5C6C7C8C9CACBCCCDCECF"
-    "D0D1D2D3D4D5D6D7D8D9DADBDCDDDEDF"
-    "E0E1E2E3E4E5E6E7E8E9EAEBECEDEEEF"
-    "F0F1F2F3F4F5F6F7F8F9FAFBFCFDFEFF";
-
-constexpr inline std::string_view hex_byte_to_char_lowercase_table = //
-    "000102030405060708090a0b0c0d0e0f"
-    "101112131415161718191a1b1c1d1e1f"
-    "202122232425262728292a2b2c2d2e2f"
-    "303132333435363738393a3b3c3d3e3f"
-    "404142434445464748494a4b4c4d4e4f"
-    "505152535455565758595a5b5c5d5e5f"
-    "606162636465666768696a6b6c6d6e6f"
-    "707172737475767778797a7b7c7d7e7f"
-    "808182838485868788898a8b8c8d8e8f"
-    "909192939495969798999a9b9c9d9e9f"
-    "a0a1a2a3a4a5a6a7a8a9aaabacadaeaf"
-    "b0b1b2b3b4b5b6b7b8b9babbbcbdbebf"
-    "c0c1c2c3c4c5c6c7c8c9cacbcccdcecf"
-    "d0d1d2d3d4d5d6d7d8d9dadbdcdddedf"
-    "e0e1e2e3e4e5e6e7e8e9eaebecedeeef"
-    "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";
-
 inline void writeHexByteUppercase(UInt8 byte, void * out)
 {
-    memcpy(out, &hex_byte_to_char_uppercase_table[static_cast<size_t>(byte) * 2], 2);
+    memcpy(out, &impl::hex_byte_to_char_uppercase_table[static_cast<size_t>(byte) * 2], 2);
 }

 inline void writeHexByteLowercase(UInt8 byte, void * out)
 {
-    memcpy(out, &hex_byte_to_char_lowercase_table[static_cast<size_t>(byte) * 2], 2);
+    memcpy(out, &impl::hex_byte_to_char_lowercase_table[static_cast<size_t>(byte) * 2], 2);
 }

-constexpr inline std::string_view bin_byte_to_char_table = //
-    "0000000000000001000000100000001100000100000001010000011000000111"
-    "0000100000001001000010100000101100001100000011010000111000001111"
-    "0001000000010001000100100001001100010100000101010001011000010111"
-    "0001100000011001000110100001101100011100000111010001111000011111"
-    "0010000000100001001000100010001100100100001001010010011000100111"
-    "0010100000101001001010100010101100101100001011010010111000101111"
-    "0011000000110001001100100011001100110100001101010011011000110111"
-    "0011100000111001001110100011101100111100001111010011111000111111"
-    "0100000001000001010000100100001101000100010001010100011001000111"
-    "0100100001001001010010100100101101001100010011010100111001001111"
-    "0101000001010001010100100101001101010100010101010101011001010111"
-    "0101100001011001010110100101101101011100010111010101111001011111"
-    "0110000001100001011000100110001101100100011001010110011001100111"
-    "0110100001101001011010100110101101101100011011010110111001101111"
-    "0111000001110001011100100111001101110100011101010111011001110111"
-    "0111100001111001011110100111101101111100011111010111111001111111"
-    "1000000010000001100000101000001110000100100001011000011010000111"
-    "1000100010001001100010101000101110001100100011011000111010001111"
-    "1001000010010001100100101001001110010100100101011001011010010111"
-    "1001100010011001100110101001101110011100100111011001111010011111"
-    "1010000010100001101000101010001110100100101001011010011010100111"
-    "1010100010101001101010101010101110101100101011011010111010101111"
-    "1011000010110001101100101011001110110100101101011011011010110111"
-    "1011100010111001101110101011101110111100101111011011111010111111"
-    "1100000011000001110000101100001111000100110001011100011011000111"
-    "1100100011001001110010101100101111001100110011011100111011001111"
-    "1101000011010001110100101101001111010100110101011101011011010111"
-    "1101100011011001110110101101101111011100110111011101111011011111"
-    "1110000011100001111000101110001111100100111001011110011011100111"
-    "1110100011101001111010101110101111101100111011011110111011101111"
-    "1111000011110001111100101111001111110100111101011111011011110111"
-    "1111100011111001111110101111101111111100111111011111111011111111";
-
-inline void writeBinByte(UInt8 byte, void * out)
-{
-    memcpy(out, &bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
-}
-
-/// Produces hex representation of an unsigned int with leading zeros (for checksums)
-template <typename TUInt>
-inline void writeHexUIntImpl(TUInt uint_, char * out, std::string_view table)
-{
-    union
-    {
-        TUInt value;
-        UInt8 uint8[sizeof(TUInt)];
-    };
-
-    value = uint_;
-
-    for (size_t i = 0; i < sizeof(TUInt); ++i)
-    {
-        if constexpr (std::endian::native == std::endian::little)
-            memcpy(out + i * 2, &table[static_cast<size_t>(uint8[sizeof(TUInt) - 1 - i]) * 2], 2);
-        else
-            memcpy(out + i * 2, &table[static_cast<size_t>(uint8[i]) * 2], 2);
-    }
-}
-
-template <typename TUInt>
-inline void writeHexUIntUppercase(TUInt uint_, char * out)
-{
-    writeHexUIntImpl(uint_, out, hex_byte_to_char_uppercase_table);
-}
-
-template <typename TUInt>
-inline void writeHexUIntLowercase(TUInt uint_, char * out)
-{
-    writeHexUIntImpl(uint_, out, hex_byte_to_char_lowercase_table);
-}
-
-template <typename TUInt>
-std::string getHexUIntUppercase(TUInt uint_)
-{
-    std::string res(sizeof(TUInt) * 2, '\0');
-    writeHexUIntUppercase(uint_, res.data());
-    return res;
-}
-
-template <typename TUInt>
-std::string getHexUIntLowercase(TUInt uint_)
-{
-    std::string res(sizeof(TUInt) * 2, '\0');
-    writeHexUIntLowercase(uint_, res.data());
-    return res;
-}
-
-/// Maps 0..9, A..F, a..f to 0..15. Other chars are mapped to implementation specific value.
-constexpr inline std::string_view hex_char_to_digit_table
-    = {"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\xff\xff\xff\xff\xff\xff" //0-9
-       "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //A-Z
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //a-z
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
-       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
-       256};
-
+/// Converts a hex representation with leading zeros back to an integer value.
+/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
+template <typename T>
+constexpr T unhexUInt(const char * data)
+{
+    return impl::HexConversion<T>::unhex(data);
+}
+
+/// Converts a hexadecimal digit '0'..'f' or '0'..'F' to UInt8.
 constexpr UInt8 unhex(char c)
 {
-    return hex_char_to_digit_table[static_cast<UInt8>(c)];
+    return impl::unhexDigit(c);
 }

+/// Converts two hexadecimal digits to UInt8.
 constexpr UInt8 unhex2(const char * data)
 {
-    return static_cast<UInt8>(unhex(data[0])) * 0x10 + static_cast<UInt8>(unhex(data[1]));
+    return unhexUInt<UInt8>(data);
 }

+/// Converts four hexadecimal digits to UInt16.
 constexpr UInt16 unhex4(const char * data)
 {
-    return static_cast<UInt16>(unhex(data[0])) * 0x1000 + static_cast<UInt16>(unhex(data[1])) * 0x100
-        + static_cast<UInt16>(unhex(data[2])) * 0x10 + static_cast<UInt16>(unhex(data[3]));
+    return unhexUInt<UInt16>(data);
 }

-template <typename TUInt>
-constexpr TUInt unhexUInt(const char * data)
+/// Produces a binary representation of a single byte.
+inline void writeBinByte(UInt8 byte, void * out)
 {
-    TUInt res = 0;
-    if constexpr ((sizeof(TUInt) <= 8) || ((sizeof(TUInt) % 8) != 0))
-    {
-        for (size_t i = 0; i < sizeof(TUInt) * 2; ++i, ++data)
-        {
-            res <<= 4;
-            res += unhex(*data);
-        }
-    }
-    else
-    {
-        for (size_t i = 0; i < sizeof(TUInt) / 8; ++i, data += 16)
-        {
-            res <<= 64;
-            res += unhexUInt<UInt64>(data);
-        }
-    }
-    return res;
+    memcpy(out, &impl::bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
 }
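The public entry points keep their names while the machinery moves into impl. A hedged round-trip sketch (the include path is an assumption; the function names come from the hunk above):

#include <cassert>
#include <cstdint>
#include <string>
#include <base/hex.h>   // assumed include path for base/base/hex.h

void roundTripExample()
{
    const std::uint32_t value = 0xDEADBEEF;
    std::string hex = getHexUIntLowercase(value);        // "deadbeef", fixed width with leading zeros
    assert(hex.size() == sizeof(value) * 2);
    assert(unhexUInt<std::uint32_t>(hex.data()) == value);
}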
@ -11,3 +11,8 @@ constexpr double interpolateExponential(double min, double max, double ratio)
     assert(min > 0 && ratio >= 0 && ratio <= 1);
     return min * std::pow(max / min, ratio);
 }
+
+constexpr double interpolateLinear(double min, double max, double ratio)
+{
+    return std::lerp(min, max, ratio);
+}
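For contrast with interpolateExponential just above, the new interpolateLinear is a straight std::lerp wrapper. A tiny self-contained check (mirrors the two helpers; not taken verbatim from the repo):

#include <cassert>
#include <cmath>

constexpr double interpolateLinearSketch(double min, double max, double ratio)
{
    return std::lerp(min, max, ratio);   // min + ratio * (max - min)
}

double interpolateExponentialSketch(double min, double max, double ratio)
{
    return min * std::pow(max / min, ratio);   // geometric path from min to max
}

int main()
{
    assert(interpolateLinearSketch(0.0, 10.0, 0.5) == 5.0);
    assert(std::abs(interpolateExponentialSketch(1.0, 100.0, 0.5) - 10.0) < 1e-9);   // geometric midpoint
}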
9
base/base/move_extend.h
Normal file
@ -0,0 +1,9 @@
+#pragma once
+
+/// Extend @p to by moving elements from @p from to @p to end
+/// @return @p to iterator to first of moved elements.
+template <class To, class From>
+typename To::iterator moveExtend(To & to, From && from)
+{
+    return to.insert(to.end(), std::make_move_iterator(from.begin()), std::make_move_iterator(from.end()));
+}
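Usage of the new helper is straightforward; a self-contained sketch (the template body is copied from the new file for illustration):

#include <iterator>
#include <string>
#include <vector>

template <class To, class From>
typename To::iterator moveExtend(To & to, From && from)
{
    return to.insert(to.end(), std::make_move_iterator(from.begin()), std::make_move_iterator(from.end()));
}

int main()
{
    std::vector<std::string> dst{"a"};
    std::vector<std::string> src{"b", "c"};
    // dst becomes {"a", "b", "c"}; the strings in src are moved-from.
    auto first_moved = moveExtend(dst, std::move(src));
    return first_moved == dst.begin() + 1 ? 0 : 1;
}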
@ -27,6 +27,8 @@ using FromDoubleIntermediateType = long double;
 using FromDoubleIntermediateType = boost::multiprecision::cpp_bin_float_double_extended;
 #endif

+namespace CityHash_v1_0_2 { struct uint128; }
+
 namespace wide
 {
@ -281,6 +283,17 @@ struct integer<Bits, Signed>::_impl
         }
     }

+    template <typename CityHashUInt128 = CityHash_v1_0_2::uint128>
+    constexpr static void wide_integer_from_cityhash_uint128(integer<Bits, Signed> & self, const CityHashUInt128 & value) noexcept
+    {
+        static_assert(sizeof(item_count) >= 2);
+
+        if constexpr (std::endian::native == std::endian::little)
+            wide_integer_from_tuple_like(self, std::make_pair(value.low64, value.high64));
+        else
+            wide_integer_from_tuple_like(self, std::make_pair(value.high64, value.low64));
+    }
+
 /**
  * N.B. t is constructed from double, so max(t) = max(double) ~ 2^310
  * the recursive call happens when t / 2^64 > 2^64, so there won't be more than 5 of them.
|
|||||||
_impl::wide_integer_from_wide_integer(*this, rhs);
|
_impl::wide_integer_from_wide_integer(*this, rhs);
|
||||||
else if constexpr (IsTupleLike<T>::value)
|
else if constexpr (IsTupleLike<T>::value)
|
||||||
_impl::wide_integer_from_tuple_like(*this, rhs);
|
_impl::wide_integer_from_tuple_like(*this, rhs);
|
||||||
|
else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
|
||||||
|
_impl::wide_integer_from_cityhash_uint128(*this, rhs);
|
||||||
else
|
else
|
||||||
_impl::wide_integer_from_builtin(*this, rhs);
|
_impl::wide_integer_from_builtin(*this, rhs);
|
||||||
}
|
}
|
||||||
@ -1051,6 +1066,8 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
         _impl::wide_integer_from_wide_integer(*this, *il.begin());
     else if constexpr (IsTupleLike<T>::value)
         _impl::wide_integer_from_tuple_like(*this, *il.begin());
+    else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
+        _impl::wide_integer_from_cityhash_uint128(*this, *il.begin());
     else
         _impl::wide_integer_from_builtin(*this, *il.begin());
 }
@ -1088,6 +1105,8 @@ constexpr integer<Bits, Signed> & integer<Bits, Signed>::operator=(T rhs) noexce
 {
     if constexpr (IsTupleLike<T>::value)
         _impl::wide_integer_from_tuple_like(*this, rhs);
+    else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
+        _impl::wide_integer_from_cityhash_uint128(*this, rhs);
     else
         _impl::wide_integer_from_builtin(*this, rhs);
     return *this;
@ -87,7 +87,6 @@ set (SRCS
     src/LoggingRegistry.cpp
     src/LogStream.cpp
     src/MD5Engine.cpp
-    src/MemoryPool.cpp
     src/MemoryStream.cpp
     src/Message.cpp
     src/Mutex.cpp
@ -1,116 +0,0 @@
-//
-// MemoryPool.h
-//
-// Library: Foundation
-// Package: Core
-// Module: MemoryPool
-//
-// Definition of the MemoryPool class.
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef Foundation_MemoryPool_INCLUDED
-#define Foundation_MemoryPool_INCLUDED
-
-
-#include <cstddef>
-#include <vector>
-#include "Poco/Foundation.h"
-#include "Poco/Mutex.h"
-
-
-namespace Poco
-{
-
-
-class Foundation_API MemoryPool
-/// A simple pool for fixed-size memory blocks.
-///
-/// The main purpose of this class is to speed-up
-/// memory allocations, as well as to reduce memory
-/// fragmentation in situations where the same blocks
-/// are allocated all over again, such as in server
-/// applications.
-///
-/// All allocated blocks are retained for future use.
-/// A limit on the number of blocks can be specified.
-/// Blocks can be preallocated.
-{
-public:
-    MemoryPool(std::size_t blockSize, int preAlloc = 0, int maxAlloc = 0);
-    /// Creates a MemoryPool for blocks with the given blockSize.
-    /// The number of blocks given in preAlloc are preallocated.
-
-    ~MemoryPool();
-
-    void * get();
-    /// Returns a memory block. If there are no more blocks
-    /// in the pool, a new block will be allocated.
-    ///
-    /// If maxAlloc blocks are already allocated, an
-    /// OutOfMemoryException is thrown.
-
-    void release(void * ptr);
-    /// Releases a memory block and returns it to the pool.
-
-    std::size_t blockSize() const;
-    /// Returns the block size.
-
-    int allocated() const;
-    /// Returns the number of allocated blocks.
-
-    int available() const;
-    /// Returns the number of available blocks in the pool.
-
-private:
-    MemoryPool();
-    MemoryPool(const MemoryPool &);
-    MemoryPool & operator=(const MemoryPool &);
-
-    void clear();
-
-    enum
-    {
-        BLOCK_RESERVE = 128
-    };
-
-    typedef std::vector<char *> BlockVec;
-
-    std::size_t _blockSize;
-    int _maxAlloc;
-    int _allocated;
-    BlockVec _blocks;
-    FastMutex _mutex;
-};
-
-
-//
-// inlines
-//
-inline std::size_t MemoryPool::blockSize() const
-{
-    return _blockSize;
-}
-
-
-inline int MemoryPool::allocated() const
-{
-    return _allocated;
-}
-
-
-inline int MemoryPool::available() const
-{
-    return (int)_blocks.size();
-}
-
-
-} // namespace Poco
-
-
-#endif // Foundation_MemoryPool_INCLUDED
@ -67,6 +67,8 @@ public:
     Message(
         const std::string & source, const std::string & text, Priority prio, const char * file, int line, std::string_view fmt_str = {});
+    Message(
+        std::string && source, std::string && text, Priority prio, const char * file, int line, std::string_view fmt_str);
     /// Creates a Message with the given source, text, priority,
     /// source file path and line.
     ///
@ -57,7 +57,7 @@ public:
     URI();
     /// Creates an empty URI.

-    explicit URI(const std::string & uri);
+    explicit URI(const std::string & uri, bool disable_url_encoding = false);
     /// Parses an URI from the given string. Throws a
     /// SyntaxException if the uri is not valid.
@ -350,6 +350,10 @@ protected:
     static const std::string ILLEGAL;

 private:
+    void encodePath(std::string & encodedStr) const;
+    void decodePath(const std::string & encodedStr);
+
+
     std::string _scheme;
     std::string _userInfo;
     std::string _host;
@ -357,6 +361,8 @@ private:
     std::string _path;
     std::string _query;
     std::string _fragment;
+
+    bool _disable_url_encoding = false;
 };
@ -1,105 +0,0 @@
-//
-// MemoryPool.cpp
-//
-// Library: Foundation
-// Package: Core
-// Module: MemoryPool
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/MemoryPool.h"
-#include "Poco/Exception.h"
-
-
-namespace Poco {
-
-
-MemoryPool::MemoryPool(std::size_t blockSize, int preAlloc, int maxAlloc):
-    _blockSize(blockSize),
-    _maxAlloc(maxAlloc),
-    _allocated(preAlloc)
-{
-    poco_assert (maxAlloc == 0 || maxAlloc >= preAlloc);
-    poco_assert (preAlloc >= 0 && maxAlloc >= 0);
-
-    int r = BLOCK_RESERVE;
-    if (preAlloc > r)
-        r = preAlloc;
-    if (maxAlloc > 0 && maxAlloc < r)
-        r = maxAlloc;
-    _blocks.reserve(r);
-
-    try
-    {
-        for (int i = 0; i < preAlloc; ++i)
-        {
-            _blocks.push_back(new char[_blockSize]);
-        }
-    }
-    catch (...)
-    {
-        clear();
-        throw;
-    }
-}
-
-
-MemoryPool::~MemoryPool()
-{
-    clear();
-}
-
-
-void MemoryPool::clear()
-{
-    for (BlockVec::iterator it = _blocks.begin(); it != _blocks.end(); ++it)
-    {
-        delete [] *it;
-    }
-    _blocks.clear();
-}
-
-
-void* MemoryPool::get()
-{
-    FastMutex::ScopedLock lock(_mutex);
-
-    if (_blocks.empty())
-    {
-        if (_maxAlloc == 0 || _allocated < _maxAlloc)
-        {
-            ++_allocated;
-            return new char[_blockSize];
-        }
-        else throw OutOfMemoryException("MemoryPool exhausted");
-    }
-    else
-    {
-        char* ptr = _blocks.back();
-        _blocks.pop_back();
-        return ptr;
-    }
-}
-
-
-void MemoryPool::release(void* ptr)
-{
-    FastMutex::ScopedLock lock(_mutex);
-
-    try
-    {
-        _blocks.push_back(reinterpret_cast<char*>(ptr));
-    }
-    catch (...)
-    {
-        delete [] reinterpret_cast<char*>(ptr);
-    }
-}
-
-
-} // namespace Poco
@ -60,6 +60,19 @@ Message::Message(const std::string& source, const std::string& text, Priority pr
 }

+Message::Message(std::string && source, std::string && text, Priority prio, const char * file, int line, std::string_view fmt_str):
+    _source(std::move(source)),
+    _text(std::move(text)),
+    _prio(prio),
+    _tid(0),
+    _file(file),
+    _line(line),
+    _pMap(0),
+    _fmt_str(fmt_str)
+{
+    init();
+}
+
 Message::Message(const Message& msg):
     _source(msg._source),
     _text(msg._text),
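The new rvalue overload lets callers hand over already-built strings without a copy. A hedged usage sketch (assumes Poco::Message as diffed above; PRIO_INFORMATION is the standard Poco priority):

#include <string>
#include <utility>
#include "Poco/Message.h"

void logExample()
{
    std::string source = "MyComponent";
    std::string text = "something happened";
    // Both strings are moved into the Message instead of being copied.
    Poco::Message msg(std::move(source), std::move(text),
                      Poco::Message::PRIO_INFORMATION, __FILE__, __LINE__, /*fmt_str=*/{});
}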
@ -36,8 +36,8 @@ URI::URI():
 }

-URI::URI(const std::string& uri):
-    _port(0)
+URI::URI(const std::string& uri, bool decode_and_encode_path):
+    _port(0), _disable_url_encoding(decode_and_encode_path)
 {
     parse(uri);
 }
@ -107,7 +107,8 @@ URI::URI(const URI& uri):
     _port(uri._port),
     _path(uri._path),
     _query(uri._query),
-    _fragment(uri._fragment)
+    _fragment(uri._fragment),
+    _disable_url_encoding(uri._disable_url_encoding)
 {
 }
@ -119,7 +120,8 @@ URI::URI(const URI& baseURI, const std::string& relativeURI):
     _port(baseURI._port),
     _path(baseURI._path),
     _query(baseURI._query),
-    _fragment(baseURI._fragment)
+    _fragment(baseURI._fragment),
+    _disable_url_encoding(baseURI._disable_url_encoding)
 {
     resolve(relativeURI);
 }
@ -151,6 +153,7 @@ URI& URI::operator = (const URI& uri)
         _path = uri._path;
         _query = uri._query;
         _fragment = uri._fragment;
+        _disable_url_encoding = uri._disable_url_encoding;
     }
     return *this;
 }
@ -181,6 +184,7 @@ void URI::swap(URI& uri)
     std::swap(_path, uri._path);
     std::swap(_query, uri._query);
     std::swap(_fragment, uri._fragment);
+    std::swap(_disable_url_encoding, uri._disable_url_encoding);
 }
@ -201,7 +205,7 @@ std::string URI::toString() const
     std::string uri;
     if (isRelative())
     {
-        encode(_path, RESERVED_PATH, uri);
+        encodePath(uri);
     }
     else
     {
@ -217,7 +221,7 @@ std::string URI::toString() const
     {
         if (!auth.empty() && _path[0] != '/')
             uri += '/';
-        encode(_path, RESERVED_PATH, uri);
+        encodePath(uri);
     }
     else if (!_query.empty() || !_fragment.empty())
     {
@ -313,7 +317,7 @@ void URI::setAuthority(const std::string& authority)
 void URI::setPath(const std::string& path)
 {
     _path.clear();
-    decode(path, _path);
+    decodePath(path);
 }
@ -418,7 +422,7 @@ void URI::setPathEtc(const std::string& pathEtc)
 std::string URI::getPathEtc() const
 {
     std::string pathEtc;
-    encode(_path, RESERVED_PATH, pathEtc);
+    encodePath(pathEtc);
     if (!_query.empty())
     {
         pathEtc += '?';
@ -436,7 +440,7 @@ std::string URI::getPathAndQuery() const
 std::string URI::getPathAndQuery() const
 {
     std::string pathAndQuery;
-    encode(_path, RESERVED_PATH, pathAndQuery);
+    encodePath(pathAndQuery);
     if (!_query.empty())
     {
         pathAndQuery += '?';
@ -681,6 +685,21 @@ void URI::decode(const std::string& str, std::string& decodedStr, bool plusAsSpa
     }
 }

+void URI::encodePath(std::string & encodedStr) const
+{
+    if (_disable_url_encoding)
+        encodedStr = _path;
+    else
+        encode(_path, RESERVED_PATH, encodedStr);
+}
+
+void URI::decodePath(const std::string & encodedStr)
+{
+    if (_disable_url_encoding)
+        _path = encodedStr;
+    else
+        decode(encodedStr, _path);
+}
+
 bool URI::isWellKnownPort() const
 {
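A hedged usage sketch of the new flag (per encodePath/decodePath above, passing true stores and emits the path verbatim, bypassing percent-decoding/encoding):

#include <iostream>
#include "Poco/URI.h"

void rawPathExample()
{
    // Second argument true: the encoded path is kept exactly as given,
    // so "%2F" is not decoded into '/'.
    Poco::URI uri("https://example.com/a%2Fb?x=1", /*disable_url_encoding=*/true);
    std::cout << uri.getPathEtc() << '\n';   // prints "/a%2Fb?x=1"
}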
@ -820,7 +839,7 @@ void URI::parsePath(std::string::const_iterator& it, const std::string::const_it
 {
     std::string path;
     while (it != end && *it != '?' && *it != '#') path += *it++;
-    decode(path, _path);
+    decodePath(path);
 }
@ -19,7 +19,6 @@

 #include "Poco/BufferedStreamBuf.h"
-#include "Poco/Net/HTTPBufferAllocator.h"
 #include "Poco/Net/Net.h"

@ -27,9 +26,9 @@ namespace Poco
 {
     namespace Net
     {
-        typedef Poco::BasicBufferedStreamBuf<char, std::char_traits<char>, HTTPBufferAllocator> HTTPBasicStreamBuf;
+        constexpr size_t HTTP_DEFAULT_BUFFER_SIZE = 8 * 1024;
+
+        typedef Poco::BasicBufferedStreamBuf<char, std::char_traits<char>> HTTPBasicStreamBuf;

     }
 }
@ -1,53 +0,0 @@
-//
-// HTTPBufferAllocator.h
-//
-// Library: Net
-// Package: HTTP
-// Module: HTTPBufferAllocator
-//
-// Definition of the HTTPBufferAllocator class.
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef Net_HTTPBufferAllocator_INCLUDED
-#define Net_HTTPBufferAllocator_INCLUDED
-
-
-#include <ios>
-#include "Poco/MemoryPool.h"
-#include "Poco/Net/Net.h"
-
-
-namespace Poco
-{
-    namespace Net
-    {
-
-
-        class Net_API HTTPBufferAllocator
-        /// A BufferAllocator for HTTP streams.
-        {
-        public:
-            static char * allocate(std::streamsize size);
-            static void deallocate(char * ptr, std::streamsize size);
-
-            enum
-            {
-                BUFFER_SIZE = 128 * 1024
-            };
-
-        private:
-            static Poco::MemoryPool _pool;
-        };
-
-
-    }
-} // namespace Poco::Net
-
-
-#endif // Net_HTTPBufferAllocator_INCLUDED
@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"

@ -80,12 +79,6 @@ namespace Net
         public:
             HTTPChunkedInputStream(HTTPSession & session);
             ~HTTPChunkedInputStream();
-
-            void * operator new(std::size_t size);
-            void operator delete(void * ptr);
-
-        private:
-            static Poco::MemoryPool _pool;
         };

@ -95,12 +88,6 @@ namespace Net
         public:
             HTTPChunkedOutputStream(HTTPSession & session);
             ~HTTPChunkedOutputStream();
-
-            void * operator new(std::size_t size);
-            void operator delete(void * ptr);
-
-        private:
-            static Poco::MemoryPool _pool;
         };
@ -306,7 +306,7 @@ namespace Net
                 DEFAULT_KEEP_ALIVE_TIMEOUT = 8
             };

-            void reconnect();
+            virtual void reconnect();
             /// Connects the underlying socket to the HTTP server.

             int write(const char * buffer, std::streamsize length);
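Making reconnect() virtual lets session subclasses hook connection setup. A minimal sketch (the subclass is hypothetical; it assumes reconnect() is accessible to derived classes, as the protected context of this hunk suggests):

#include "Poco/Net/HTTPClientSession.h"

class LoggingSession : public Poco::Net::HTTPClientSession
{
public:
    using Poco::Net::HTTPClientSession::HTTPClientSession;

protected:
    void reconnect() override
    {
        // Custom bookkeeping before the socket is (re)connected.
        Poco::Net::HTTPClientSession::reconnect();
    }
};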
@ -78,12 +78,6 @@ namespace Net
         public:
             HTTPFixedLengthInputStream(HTTPSession & session, HTTPFixedLengthStreamBuf::ContentLength length);
             ~HTTPFixedLengthInputStream();
-
-            void * operator new(std::size_t size);
-            void operator delete(void * ptr);
-
-        private:
-            static Poco::MemoryPool _pool;
         };

@ -93,12 +87,6 @@ namespace Net
         public:
             HTTPFixedLengthOutputStream(HTTPSession & session, HTTPFixedLengthStreamBuf::ContentLength length);
             ~HTTPFixedLengthOutputStream();
-
-            void * operator new(std::size_t size);
-            void operator delete(void * ptr);
-
-        private:
-            static Poco::MemoryPool _pool;
         };
@@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"
 
@@ -74,12 +73,6 @@ namespace Net
     public:
         HTTPHeaderInputStream(HTTPSession & session);
        ~HTTPHeaderInputStream();
 
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
-
     };
 
 
@@ -89,12 +82,6 @@ namespace Net
     public:
         HTTPHeaderOutputStream(HTTPSession & session);
        ~HTTPHeaderOutputStream();
 
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
-
     };
 
 
@@ -192,7 +192,7 @@ namespace Net
        HTTPSession & operator=(const HTTPSession &);
 
        StreamSocket _socket;
-        char * _pBuffer;
+        std::unique_ptr<char[]> _pBuffer;
        char * _pCurrent;
        char * _pEnd;
        bool _keepAlive;
@@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"
 
@@ -75,12 +74,6 @@ namespace Net
     public:
         HTTPInputStream(HTTPSession & session);
        ~HTTPInputStream();
 
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
-
     };
 
 
@@ -90,12 +83,6 @@ namespace Net
     public:
         HTTPOutputStream(HTTPSession & session);
        ~HTTPOutputStream();
 
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
-
     };
 
 
@@ -1,44 +0,0 @@
-//
-// HTTPBufferAllocator.cpp
-//
-// Library: Net
-// Package: HTTP
-// Module: HTTPBufferAllocator
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/Net/HTTPBufferAllocator.h"
-
-
-using Poco::MemoryPool;
-
-
-namespace Poco {
-namespace Net {
-
-
-MemoryPool HTTPBufferAllocator::_pool(HTTPBufferAllocator::BUFFER_SIZE, 16);
-
-
-char* HTTPBufferAllocator::allocate(std::streamsize size)
-{
-    poco_assert_dbg (size == BUFFER_SIZE);
-
-    return reinterpret_cast<char*>(_pool.get());
-}
-
-
-void HTTPBufferAllocator::deallocate(char* ptr, std::streamsize size)
-{
-    poco_assert_dbg (size == BUFFER_SIZE);
-
-    _pool.release(ptr);
-}
-
-
-} } // namespace Poco::Net
@@ -34,7 +34,7 @@ namespace Net {
 
 
 HTTPChunkedStreamBuf::HTTPChunkedStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _mode(mode),
     _chunk(0)
@@ -181,10 +181,6 @@ HTTPChunkedStreamBuf* HTTPChunkedIOS::rdbuf()
 // HTTPChunkedInputStream
 //
 
 
-Poco::MemoryPool HTTPChunkedInputStream::_pool(sizeof(HTTPChunkedInputStream));
-
-
 HTTPChunkedInputStream::HTTPChunkedInputStream(HTTPSession& session):
     HTTPChunkedIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -196,34 +192,10 @@ HTTPChunkedInputStream::~HTTPChunkedInputStream()
 {
 }
 
 
-void* HTTPChunkedInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPChunkedInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPChunkedOutputStream
 //
 
 
-Poco::MemoryPool HTTPChunkedOutputStream::_pool(sizeof(HTTPChunkedOutputStream));
-
-
 HTTPChunkedOutputStream::HTTPChunkedOutputStream(HTTPSession& session):
     HTTPChunkedIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -235,24 +207,4 @@ HTTPChunkedOutputStream::~HTTPChunkedOutputStream()
 {
 }
 
 
-void* HTTPChunkedOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPChunkedOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -30,7 +30,7 @@ namespace Net {
 
 
 HTTPFixedLengthStreamBuf::HTTPFixedLengthStreamBuf(HTTPSession& session, ContentLength length, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _length(length),
     _count(0)
@@ -109,9 +109,6 @@ HTTPFixedLengthStreamBuf* HTTPFixedLengthIOS::rdbuf()
 //
 
 
-Poco::MemoryPool HTTPFixedLengthInputStream::_pool(sizeof(HTTPFixedLengthInputStream));
-
-
 HTTPFixedLengthInputStream::HTTPFixedLengthInputStream(HTTPSession& session, HTTPFixedLengthStreamBuf::ContentLength length):
     HTTPFixedLengthIOS(session, length, std::ios::in),
     std::istream(&_buf)
@@ -124,33 +121,10 @@ HTTPFixedLengthInputStream::~HTTPFixedLengthInputStream()
 }
 
 
-void* HTTPFixedLengthInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPFixedLengthInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPFixedLengthOutputStream
 //
 
 
-Poco::MemoryPool HTTPFixedLengthOutputStream::_pool(sizeof(HTTPFixedLengthOutputStream));
-
-
 HTTPFixedLengthOutputStream::HTTPFixedLengthOutputStream(HTTPSession& session, HTTPFixedLengthStreamBuf::ContentLength length):
     HTTPFixedLengthIOS(session, length, std::ios::out),
     std::ostream(&_buf)
@@ -163,23 +137,4 @@ HTTPFixedLengthOutputStream::~HTTPFixedLengthOutputStream()
 }
 
 
-void* HTTPFixedLengthOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPFixedLengthOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -26,7 +26,7 @@ namespace Net {
 
 
 HTTPHeaderStreamBuf::HTTPHeaderStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _end(false)
 {
@@ -101,10 +101,6 @@ HTTPHeaderStreamBuf* HTTPHeaderIOS::rdbuf()
 // HTTPHeaderInputStream
 //
 
 
-Poco::MemoryPool HTTPHeaderInputStream::_pool(sizeof(HTTPHeaderInputStream));
-
-
 HTTPHeaderInputStream::HTTPHeaderInputStream(HTTPSession& session):
     HTTPHeaderIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -116,34 +112,10 @@ HTTPHeaderInputStream::~HTTPHeaderInputStream()
 {
 }
 
 
-void* HTTPHeaderInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPHeaderInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPHeaderOutputStream
 //
 
 
-Poco::MemoryPool HTTPHeaderOutputStream::_pool(sizeof(HTTPHeaderOutputStream));
-
-
 HTTPHeaderOutputStream::HTTPHeaderOutputStream(HTTPSession& session):
     HTTPHeaderIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -155,24 +127,4 @@ HTTPHeaderOutputStream::~HTTPHeaderOutputStream()
 {
 }
 
 
-void* HTTPHeaderOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPHeaderOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -13,8 +13,8 @@
 
 
 #include "Poco/Net/HTTPSession.h"
-#include "Poco/Net/HTTPBufferAllocator.h"
 #include "Poco/Net/NetException.h"
+#include "Poco/Net/HTTPBasicStreamBuf.h"
 #include <cstring>
 
 
@@ -68,14 +68,6 @@ HTTPSession::HTTPSession(const StreamSocket& socket, bool keepAlive):
 
 HTTPSession::~HTTPSession()
 {
-    try
-    {
-        if (_pBuffer) HTTPBufferAllocator::deallocate(_pBuffer, HTTPBufferAllocator::BUFFER_SIZE);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
     try
     {
         close();
@@ -177,10 +169,10 @@ void HTTPSession::refill()
 {
     if (!_pBuffer)
     {
-        _pBuffer = HTTPBufferAllocator::allocate(HTTPBufferAllocator::BUFFER_SIZE);
+        _pBuffer = std::make_unique<char[]>(HTTP_DEFAULT_BUFFER_SIZE);
     }
-    _pCurrent = _pEnd = _pBuffer;
-    int n = receive(_pBuffer, HTTPBufferAllocator::BUFFER_SIZE);
+    _pCurrent = _pEnd = _pBuffer.get();
+    int n = receive(_pBuffer.get(), HTTP_DEFAULT_BUFFER_SIZE);
     _pEnd += n;
 }
 
@@ -199,7 +191,7 @@ void HTTPSession::connect(const SocketAddress& address)
     _socket.setNoDelay(true);
     // There may be leftover data from a previous (failed) request in the buffer,
     // so we clear it.
-    _pCurrent = _pEnd = _pBuffer;
+    _pCurrent = _pEnd = _pBuffer.get();
 }
 
 
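In short: the session buffer moves from a pool-allocated raw `char *`, freed by hand in `~HTTPSession()`, to a `std::unique_ptr<char[]>` whose destructor releases it automatically. A standalone sketch of the idiom; the size constant here is illustrative (the real HTTP_DEFAULT_BUFFER_SIZE comes from "Poco/Net/HTTPBasicStreamBuf.h", included above):

#include <cstddef>
#include <memory>

constexpr std::size_t BUFFER_SIZE = 8 * 1024; // illustrative value only

class SessionSketch
{
public:
    void refill()
    {
        // Allocate lazily, once. Ownership stays with the unique_ptr, so the
        // hand-written try/deallocate block removed from the destructor above
        // has no equivalent here: destruction frees the buffer automatically.
        if (!_buffer)
            _buffer = std::make_unique<char[]>(BUFFER_SIZE);
        _current = _end = _buffer.get();
    }

private:
    std::unique_ptr<char[]> _buffer;
    char * _current = nullptr;
    char * _end = nullptr;
};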
@@ -26,7 +26,7 @@ namespace Net {
 
 
 HTTPStreamBuf::HTTPStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _mode(mode)
 {
@@ -96,10 +96,6 @@ HTTPStreamBuf* HTTPIOS::rdbuf()
 // HTTPInputStream
 //
 
 
-Poco::MemoryPool HTTPInputStream::_pool(sizeof(HTTPInputStream));
-
-
 HTTPInputStream::HTTPInputStream(HTTPSession& session):
     HTTPIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -112,33 +108,11 @@ HTTPInputStream::~HTTPInputStream()
 }
 
 
-void* HTTPInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPOutputStream
 //
 
 
-Poco::MemoryPool HTTPOutputStream::_pool(sizeof(HTTPOutputStream));
-
-
 HTTPOutputStream::HTTPOutputStream(HTTPSession& session):
     HTTPIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -150,24 +124,4 @@ HTTPOutputStream::~HTTPOutputStream()
 {
 }
 
 
-void* HTTPOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -1,53 +0,0 @@
-//
-// ConsoleCertificateHandler.h
-//
-// Library: NetSSL_OpenSSL
-// Package: SSLCore
-// Module: ConsoleCertificateHandler
-//
-// Definition of the ConsoleCertificateHandler class.
-//
-// Copyright (c) 2006-2009, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef NetSSL_ConsoleCertificateHandler_INCLUDED
-#define NetSSL_ConsoleCertificateHandler_INCLUDED
-
-
-#include "Poco/Net/InvalidCertificateHandler.h"
-#include "Poco/Net/NetSSL.h"
-
-
-namespace Poco
-{
-namespace Net
-{
-
-
-    class NetSSL_API ConsoleCertificateHandler : public InvalidCertificateHandler
-    /// A ConsoleCertificateHandler is invoked whenever an error occurs verifying the certificate.
-    ///
-    /// The certificate is printed to stdout and the user is asked via console if he wants to accept it.
-    {
-    public:
-        ConsoleCertificateHandler(bool handleErrorsOnServerSide);
-        /// Creates the ConsoleCertificateHandler.
-
-        virtual ~ConsoleCertificateHandler();
-        /// Destroys the ConsoleCertificateHandler.
-
-        void onInvalidCertificate(const void * pSender, VerificationErrorArgs & errorCert);
-        /// Prints the certificate to stdout and waits for user input on the console
-        /// to decide if a certificate should be accepted/rejected.
-    };
-
-
-}
-} // namespace Poco::Net
-
-
-#endif // NetSSL_ConsoleCertificateHandler_INCLUDED
@@ -85,7 +85,7 @@ namespace Net
    ///     </options>
    /// </privateKeyPassphraseHandler>
    /// <invalidCertificateHandler>
-   ///     <name>ConsoleCertificateHandler</name>
+   ///     <name>RejectCertificateHandler</name>
    /// </invalidCertificateHandler>
    /// <cacheSessions>true|false</cacheSessions>
    /// <sessionIdContext>someString</sessionIdContext> <!-- server only -->
@@ -186,7 +186,7 @@ namespace Net
    ///
    /// Valid initialization code would be:
    ///     SharedPtr<PrivateKeyPassphraseHandler> pConsoleHandler = new KeyConsoleHandler;
-   ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new ConsoleCertificateHandler;
+   ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new RejectCertificateHandler;
    ///     Context::Ptr pContext = new Context(Context::SERVER_USE, "any.pem", "any.pem", "rootcert.pem", Context::VERIFY_RELAXED, 9, false, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
    ///     SSLManager::instance().initializeServer(pConsoleHandler, pInvalidCertHandler, pContext);
 
@@ -203,7 +203,7 @@ namespace Net
    ///
    /// Valid initialization code would be:
    ///     SharedPtr<PrivateKeyPassphraseHandler> pConsoleHandler = new KeyConsoleHandler;
-   ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new ConsoleCertificateHandler;
+   ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new RejectCertificateHandler;
    ///     Context::Ptr pContext = new Context(Context::CLIENT_USE, "", "", "rootcert.pem", Context::VERIFY_RELAXED, 9, false, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
    ///     SSLManager::instance().initializeClient(pConsoleHandler, pInvalidCertHandler, pContext);
 
@@ -13,7 +13,6 @@
 
 
 #include "Poco/Net/CertificateHandlerFactoryMgr.h"
-#include "Poco/Net/ConsoleCertificateHandler.h"
 #include "Poco/Net/AcceptCertificateHandler.h"
 #include "Poco/Net/RejectCertificateHandler.h"
 
@@ -24,7 +23,6 @@ namespace Net {
 
 CertificateHandlerFactoryMgr::CertificateHandlerFactoryMgr()
 {
-    setFactory("ConsoleCertificateHandler", new CertificateHandlerFactoryImpl<ConsoleCertificateHandler>());
     setFactory("AcceptCertificateHandler", new CertificateHandlerFactoryImpl<AcceptCertificateHandler>());
     setFactory("RejectCertificateHandler", new CertificateHandlerFactoryImpl<RejectCertificateHandler>());
 }
@@ -1,53 +0,0 @@
-//
-// ConsoleCertificateHandler.cpp
-//
-// Library: NetSSL_OpenSSL
-// Package: SSLCore
-// Module: ConsoleCertificateHandler
-//
-// Copyright (c) 2006-2009, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/Net/ConsoleCertificateHandler.h"
-#include <iostream>
-
-
-namespace Poco {
-namespace Net {
-
-
-ConsoleCertificateHandler::ConsoleCertificateHandler(bool server): InvalidCertificateHandler(server)
-{
-}
-
-
-ConsoleCertificateHandler::~ConsoleCertificateHandler()
-{
-}
-
-
-void ConsoleCertificateHandler::onInvalidCertificate(const void*, VerificationErrorArgs& errorCert)
-{
-    const X509Certificate& aCert = errorCert.certificate();
-    std::cout << "\n";
-    std::cout << "WARNING: Certificate verification failed\n";
-    std::cout << "----------------------------------------\n";
-    std::cout << "Issuer Name:  " << aCert.issuerName() << "\n";
-    std::cout << "Subject Name: " << aCert.subjectName() << "\n\n";
-    std::cout << "The certificate yielded the error: " << errorCert.errorMessage() << "\n\n";
-    std::cout << "The error occurred in the certificate chain at position " << errorCert.errorDepth() << "\n";
-    std::cout << "Accept the certificate (y,n)? ";
-    char c = 0;
-    std::cin >> c;
-    if (c == 'y' || c == 'Y')
-        errorCert.setIgnoreError(true);
-    else
-        errorCert.setIgnoreError(false);
-}
-
-
-} } // namespace Poco::Net
@@ -46,7 +46,7 @@ const std::string SSLManager::CFG_PREFER_SERVER_CIPHERS("preferServerCiphers");
 const std::string SSLManager::CFG_DELEGATE_HANDLER("privateKeyPassphraseHandler.name");
 const std::string SSLManager::VAL_DELEGATE_HANDLER("KeyConsoleHandler");
 const std::string SSLManager::CFG_CERTIFICATE_HANDLER("invalidCertificateHandler.name");
-const std::string SSLManager::VAL_CERTIFICATE_HANDLER("ConsoleCertificateHandler");
+const std::string SSLManager::VAL_CERTIFICATE_HANDLER("RejectCertificateHandler");
 const std::string SSLManager::CFG_SERVER_PREFIX("openSSL.server.");
 const std::string SSLManager::CFG_CLIENT_PREFIX("openSSL.client.");
 const std::string SSLManager::CFG_CACHE_SESSIONS("cacheSessions");
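With ConsoleCertificateHandler deleted, the default for `invalidCertificateHandler.name` above becomes RejectCertificateHandler: a failed certificate verification now aborts the handshake instead of prompting on stdin. Custom behavior can still be plugged in by deriving from the same base class; a hypothetical sketch using only the base-class API exercised by the deleted handler:

#include <string>
#include "Poco/Net/InvalidCertificateHandler.h"

// Hypothetical handler: same decision as RejectCertificateHandler (never
// ignore the error), but it also records the failure reason.
class LoggingRejectHandler : public Poco::Net::InvalidCertificateHandler
{
public:
    explicit LoggingRejectHandler(bool handleErrorsOnServerSide)
        : Poco::Net::InvalidCertificateHandler(handleErrorsOnServerSide)
    {
    }

    void onInvalidCertificate(const void * /*pSender*/, Poco::Net::VerificationErrorArgs & errorCert) override
    {
        lastError = errorCert.errorMessage();
        errorCert.setIgnoreError(false); // reject: do not continue the handshake
    }

    std::string lastError;
};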
@@ -2,11 +2,11 @@
 
 # NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
 # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
-SET(VERSION_REVISION 54475)
+SET(VERSION_REVISION 54476)
 SET(VERSION_MAJOR 23)
-SET(VERSION_MINOR 6)
+SET(VERSION_MINOR 7)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 2fec796e73efda10a538a03af3205ce8ffa1b2de)
-SET(VERSION_DESCRIBE v23.6.1.1-testing)
-SET(VERSION_STRING 23.6.1.1)
+SET(VERSION_GITHASH d1c7e13d08868cb04d3562dcced704dd577cb1df)
+SET(VERSION_DESCRIBE v23.7.1.1-testing)
+SET(VERSION_STRING 23.7.1.1)
 # end of autochange
@@ -15,6 +15,7 @@ set(CMAKE_OSX_DEPLOYMENT_TARGET 10.15)
 set(THREADS_PREFER_PTHREAD_FLAG ON)
 find_package(Threads REQUIRED)
 
+include (cmake/unwind.cmake)
 include (cmake/cxx.cmake)
 link_libraries(global-group)
 
@@ -1,58 +0,0 @@
-# Embed a set of resource files into a resulting object file.
-#
-# Signature: `clickhouse_embed_binaries(TARGET <target> RESOURCE_DIR <dir> RESOURCES <resource> ...)
-#
-# This will generate a static library target named `<target>`, which contains the contents of
-# each `<resource>` file. The files should be located in `<dir>`. <dir> defaults to
-# ${CMAKE_CURRENT_SOURCE_DIR}, and the resources may not be empty.
-#
-# Each resource will result in three symbols in the final archive, based on the name `<resource>`.
-# These are:
-#   1. `_binary_<name>_start`: Points to the start of the binary data from `<resource>`.
-#   2. `_binary_<name>_end`: Points to the end of the binary data from `<resource>`.
-#   2. `_binary_<name>_size`: Points to the size of the binary data from `<resource>`.
-#
-# `<name>` is a normalized name derived from `<resource>`, by replacing the characters "./-" with
-# the character "_", and the character "+" with "_PLUS_". This scheme is similar to those generated
-# by `ld -r -b binary`, and matches the expectations in `./base/common/getResource.cpp`.
-macro(clickhouse_embed_binaries)
-    set(one_value_args TARGET RESOURCE_DIR)
-    set(resources RESOURCES)
-    cmake_parse_arguments(EMBED "" "${one_value_args}" ${resources} ${ARGN})
-
-    if (NOT DEFINED EMBED_TARGET)
-        message(FATAL_ERROR "A target name must be provided for embedding binary resources into")
-    endif()
-
-    if (NOT DEFINED EMBED_RESOURCE_DIR)
-        set(EMBED_RESOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
-    endif()
-
-    list(LENGTH EMBED_RESOURCES N_RESOURCES)
-    if (N_RESOURCES LESS 1)
-        message(FATAL_ERROR "The list of binary resources to embed may not be empty")
-    endif()
-
-    add_library("${EMBED_TARGET}" STATIC)
-    set_target_properties("${EMBED_TARGET}" PROPERTIES LINKER_LANGUAGE C)
-
-    set(EMBED_TEMPLATE_FILE "${PROJECT_SOURCE_DIR}/programs/embed_binary.S.in")
-
-    foreach(RESOURCE_FILE ${EMBED_RESOURCES})
-        set(ASSEMBLY_FILE_NAME "${RESOURCE_FILE}.S")
-        set(BINARY_FILE_NAME "${RESOURCE_FILE}")
-
-        # Normalize the name of the resource.
-        string(REGEX REPLACE "[\./-]" "_" SYMBOL_NAME "${RESOURCE_FILE}") # - must be last in regex
-        string(REPLACE "+" "_PLUS_" SYMBOL_NAME "${SYMBOL_NAME}")
-
-        # Generate the configured assembly file in the output directory.
-        configure_file("${EMBED_TEMPLATE_FILE}" "${CMAKE_CURRENT_BINARY_DIR}/${ASSEMBLY_FILE_NAME}" @ONLY)
-
-        # Set the include directory for relative paths specified for `.incbin` directive.
-        set_property(SOURCE "${CMAKE_CURRENT_BINARY_DIR}/${ASSEMBLY_FILE_NAME}" APPEND PROPERTY INCLUDE_DIRECTORIES "${EMBED_RESOURCE_DIR}")
-
-        target_sources("${EMBED_TARGET}" PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/${ASSEMBLY_FILE_NAME}")
-        set_target_properties("${EMBED_TARGET}" PROPERTIES OBJECT_DEPENDS "${RESOURCE_FILE}")
-    endforeach()
-endmacro()
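For context on the deleted macro: each embedded resource produced `_binary_<name>_start`/`_end`/`_size` symbols that C++ code declared extern and wrapped into a view (the deleted comment points at `./base/common/getResource.cpp` as the consumer). A minimal sketch of that consumption, with a hypothetical resource named "config.xml":

#include <cstddef>
#include <string_view>

// Symbols the deleted clickhouse_embed_binaries() macro would emit for a
// hypothetical resource "config.xml" (normalized symbol name: config_xml).
extern "C" const char _binary_config_xml_start;
extern "C" const char _binary_config_xml_end;

std::string_view getEmbeddedConfig()
{
    // The resource bytes live between the start and end symbols.
    return {&_binary_config_xml_start,
            static_cast<std::size_t>(&_binary_config_xml_end - &_binary_config_xml_start)};
}

The rest of this commit replaces this assembly-template scheme with the incbin library (see contrib/incbin-cmake below), which embeds a file with a single INCBIN(...) macro.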
@@ -1,38 +1,39 @@
-# Usage:
-# set (MAX_COMPILER_MEMORY 2000 CACHE INTERNAL "") # In megabytes
-# set (MAX_LINKER_MEMORY 3500 CACHE INTERNAL "")
-# include (cmake/limit_jobs.cmake)
+# Limit compiler/linker job concurrency to avoid OOMs on subtrees where compilation/linking is memory-intensive.
+#
+# Usage from CMake:
+# set (MAX_COMPILER_MEMORY 2000 CACHE INTERNAL "") # megabyte
+# set (MAX_LINKER_MEMORY 3500 CACHE INTERNAL "") # megabyte
+# include (cmake/limit_jobs.cmake)
+#
+# (bigger values mean fewer jobs)
 
-cmake_host_system_information(RESULT TOTAL_PHYSICAL_MEMORY QUERY TOTAL_PHYSICAL_MEMORY) # Not available under freebsd
+cmake_host_system_information(RESULT TOTAL_PHYSICAL_MEMORY QUERY TOTAL_PHYSICAL_MEMORY)
 cmake_host_system_information(RESULT NUMBER_OF_LOGICAL_CORES QUERY NUMBER_OF_LOGICAL_CORES)
 
-# 1 if not set
-option(PARALLEL_COMPILE_JOBS "Maximum number of concurrent compilation jobs" "")
+# Set to disable the automatic job-limiting
+option(PARALLEL_COMPILE_JOBS "Maximum number of concurrent compilation jobs" OFF)
+option(PARALLEL_LINK_JOBS "Maximum number of concurrent link jobs" OFF)
 
-# 1 if not set
-option(PARALLEL_LINK_JOBS "Maximum number of concurrent link jobs" "")
-
-if (NOT PARALLEL_COMPILE_JOBS AND TOTAL_PHYSICAL_MEMORY AND MAX_COMPILER_MEMORY)
+if (NOT PARALLEL_COMPILE_JOBS AND MAX_COMPILER_MEMORY)
     math(EXPR PARALLEL_COMPILE_JOBS ${TOTAL_PHYSICAL_MEMORY}/${MAX_COMPILER_MEMORY})
 
     if (NOT PARALLEL_COMPILE_JOBS)
         set (PARALLEL_COMPILE_JOBS 1)
     endif ()
+    if (PARALLEL_COMPILE_JOBS LESS NUMBER_OF_LOGICAL_CORES)
+        message(WARNING "The auto-calculated compile jobs limit (${PARALLEL_COMPILE_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_COMPILE_JOBS to override.")
+    endif()
 endif ()
 
-if (PARALLEL_COMPILE_JOBS AND (NOT NUMBER_OF_LOGICAL_CORES OR PARALLEL_COMPILE_JOBS LESS NUMBER_OF_LOGICAL_CORES))
-    set(CMAKE_JOB_POOL_COMPILE compile_job_pool${CMAKE_CURRENT_SOURCE_DIR})
-    string (REGEX REPLACE "[^a-zA-Z0-9]+" "_" CMAKE_JOB_POOL_COMPILE ${CMAKE_JOB_POOL_COMPILE})
-    set_property(GLOBAL APPEND PROPERTY JOB_POOLS ${CMAKE_JOB_POOL_COMPILE}=${PARALLEL_COMPILE_JOBS})
-endif ()
-
-
-if (NOT PARALLEL_LINK_JOBS AND TOTAL_PHYSICAL_MEMORY AND MAX_LINKER_MEMORY)
+if (NOT PARALLEL_LINK_JOBS AND MAX_LINKER_MEMORY)
     math(EXPR PARALLEL_LINK_JOBS ${TOTAL_PHYSICAL_MEMORY}/${MAX_LINKER_MEMORY})
 
     if (NOT PARALLEL_LINK_JOBS)
         set (PARALLEL_LINK_JOBS 1)
     endif ()
+    if (PARALLEL_LINK_JOBS LESS NUMBER_OF_LOGICAL_CORES)
+        message(WARNING "The auto-calculated link jobs limit (${PARALLEL_LINK_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_LINK_JOBS to override.")
+    endif()
 endif ()
 
 # ThinLTO provides its own parallel linking
@@ -46,14 +47,16 @@ if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND ENABLE_THINLTO AND PARALLE
     set (PARALLEL_LINK_JOBS 2)
 endif()
 
-if (PARALLEL_LINK_JOBS AND (NOT NUMBER_OF_LOGICAL_CORES OR PARALLEL_LINK_JOBS LESS NUMBER_OF_LOGICAL_CORES))
+message(STATUS "Building sub-tree with ${PARALLEL_COMPILE_JOBS} compile jobs and ${PARALLEL_LINK_JOBS} linker jobs (system: ${NUMBER_OF_LOGICAL_CORES} cores, ${TOTAL_PHYSICAL_MEMORY} MB DRAM, 'OFF' means the native core count).")
+
+if (PARALLEL_COMPILE_JOBS LESS NUMBER_OF_LOGICAL_CORES)
+    set(CMAKE_JOB_POOL_COMPILE compile_job_pool${CMAKE_CURRENT_SOURCE_DIR})
+    string (REGEX REPLACE "[^a-zA-Z0-9]+" "_" CMAKE_JOB_POOL_COMPILE ${CMAKE_JOB_POOL_COMPILE})
+    set_property(GLOBAL APPEND PROPERTY JOB_POOLS ${CMAKE_JOB_POOL_COMPILE}=${PARALLEL_COMPILE_JOBS})
+endif ()
+
+if (PARALLEL_LINK_JOBS LESS NUMBER_OF_LOGICAL_CORES)
     set(CMAKE_JOB_POOL_LINK link_job_pool${CMAKE_CURRENT_SOURCE_DIR})
     string (REGEX REPLACE "[^a-zA-Z0-9]+" "_" CMAKE_JOB_POOL_LINK ${CMAKE_JOB_POOL_LINK})
     set_property(GLOBAL APPEND PROPERTY JOB_POOLS ${CMAKE_JOB_POOL_LINK}=${PARALLEL_LINK_JOBS})
 endif ()
-
-if (PARALLEL_COMPILE_JOBS OR PARALLEL_LINK_JOBS)
-    message(STATUS
-        "${CMAKE_CURRENT_SOURCE_DIR}: Have ${TOTAL_PHYSICAL_MEMORY} megabytes of memory.
-        Limiting concurrent linkers jobs to ${PARALLEL_LINK_JOBS} and compiler jobs to ${PARALLEL_COMPILE_JOBS} (system has ${NUMBER_OF_LOGICAL_CORES} logical cores)")
-endif ()
@@ -33,6 +33,18 @@ if (CMAKE_CROSSCOMPILING)
     elseif (ARCH_PPC64LE)
         set (ENABLE_GRPC OFF CACHE INTERNAL "")
         set (ENABLE_SENTRY OFF CACHE INTERNAL "")
+    elseif (ARCH_RISCV64)
+        # RISC-V support is preliminary
+        set (GLIBC_COMPATIBILITY OFF CACHE INTERNAL "")
+        set (ENABLE_LDAP OFF CACHE INTERNAL "")
+        set (OPENSSL_NO_ASM ON CACHE INTERNAL "")
+        set (ENABLE_JEMALLOC ON CACHE INTERNAL "")
+        set (ENABLE_PARQUET OFF CACHE INTERNAL "")
+        set (ENABLE_GRPC OFF CACHE INTERNAL "")
+        set (ENABLE_HDFS OFF CACHE INTERNAL "")
+        set (ENABLE_MYSQL OFF CACHE INTERNAL "")
+        # It might be ok, but we need to update 'sysroot'
+        set (ENABLE_RUST OFF CACHE INTERNAL "")
     elseif (ARCH_S390X)
         set (ENABLE_GRPC OFF CACHE INTERNAL "")
         set (ENABLE_SENTRY OFF CACHE INTERNAL "")
@@ -1,13 +1 @@
-option (USE_UNWIND "Enable libunwind (better stacktraces)" ${ENABLE_LIBRARIES})
-
-if (USE_UNWIND)
-    add_subdirectory(contrib/libunwind-cmake)
-    set (UNWIND_LIBRARIES unwind)
-    set (EXCEPTION_HANDLING_LIBRARY ${UNWIND_LIBRARIES})
-
-    message (STATUS "Using libunwind: ${UNWIND_LIBRARIES}")
-else ()
-    set (EXCEPTION_HANDLING_LIBRARY gcc_eh)
-endif ()
-
-message (STATUS "Using exception handler: ${EXCEPTION_HANDLING_LIBRARY}")
+add_subdirectory(contrib/libunwind-cmake)
contrib/CMakeLists.txt (vendored): 7 changes
@@ -146,7 +146,7 @@ add_contrib (amqpcpp-cmake AMQP-CPP) # requires: libuv
 add_contrib (cassandra-cmake cassandra) # requires: libuv
 if (NOT OS_DARWIN)
     add_contrib (curl-cmake curl)
-    add_contrib (azure-cmake azure)
+    add_contrib (azure-cmake azure) # requires: curl
     add_contrib (sentry-native-cmake sentry-native) # requires: curl
 endif()
 add_contrib (fmtlib-cmake fmtlib)
@@ -157,21 +157,20 @@ add_contrib (librdkafka-cmake librdkafka) # requires: libgsasl
 add_contrib (nats-io-cmake nats-io)
 add_contrib (isa-l-cmake isa-l)
 add_contrib (libhdfs3-cmake libhdfs3) # requires: google-protobuf, krb5, isa-l
-add_contrib (hive-metastore-cmake hive-metastore) # requires: thrift/avro/arrow/libhdfs3
+add_contrib (hive-metastore-cmake hive-metastore) # requires: thrift, avro, arrow, libhdfs3
 add_contrib (cppkafka-cmake cppkafka)
 add_contrib (libpqxx-cmake libpqxx)
 add_contrib (libpq-cmake libpq)
 add_contrib (nuraft-cmake NuRaft)
 add_contrib (fast_float-cmake fast_float)
 add_contrib (datasketches-cpp-cmake datasketches-cpp)
-add_contrib (hashidsxx-cmake hashidsxx)
+add_contrib (incbin-cmake incbin)
 
 option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES})
 if (ENABLE_NLP)
     add_contrib (libstemmer-c-cmake libstemmer_c)
     add_contrib (wordnet-blast-cmake wordnet-blast)
     add_contrib (lemmagen-c-cmake lemmagen-c)
-    add_contrib (nlp-data-cmake nlp-data)
     add_contrib (cld2-cmake cld2)
 endif()
 
contrib/NuRaft (vendored): 2 changes
@@ -1 +1 @@
-Subproject commit 491eaf592d950e0e37accbe8b3f217e068c9fecf
+Subproject commit eb1572129c71beb2156dcdaadc3fb136954aed96
@@ -17,3 +17,17 @@ get_target_property(FLAT_HASH_SET_INCLUDE_DIR absl::flat_hash_set INTERFACE_INCL
 target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR})
 
 add_library(ch_contrib::abseil_swiss_tables ALIAS _abseil_swiss_tables)
+
+set(ABSL_FORMAT_SRC
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/arg.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/bind.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/extension.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/float_conversion.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/output.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/parser.cc
+)
+
+add_library(_abseil_str_format ${ABSL_FORMAT_SRC})
+target_include_directories(_abseil_str_format PUBLIC ${ABSL_ROOT_DIR})
+
+add_library(ch_contrib::abseil_str_format ALIAS _abseil_str_format)
@@ -31,12 +31,12 @@ endif()
 
 set (CMAKE_CXX_STANDARD 17)
 
-set(ARROW_VERSION "6.0.1")
+set(ARROW_VERSION "11.0.0")
 string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" ARROW_BASE_VERSION "${ARROW_VERSION}")
 
-set(ARROW_VERSION_MAJOR "6")
+set(ARROW_VERSION_MAJOR "11")
 set(ARROW_VERSION_MINOR "0")
-set(ARROW_VERSION_PATCH "1")
+set(ARROW_VERSION_PATCH "0")
 
 if(ARROW_VERSION_MAJOR STREQUAL "0")
   # Arrow 0.x.y => SO version is "x", full SO version is "x.y.0"
@@ -116,43 +116,79 @@ configure_file("${ORC_SOURCE_SRC_DIR}/Adaptor.hh.in" "${ORC_BUILD_INCLUDE_DIR}/A
 # ARROW_ORC + adapters/orc/CMakefiles
 set(ORC_SRCS
         "${CMAKE_CURRENT_BINARY_DIR}/orc_proto.pb.h"
-        "${ORC_SOURCE_SRC_DIR}/sargs/ExpressionTree.cc"
-        "${ORC_SOURCE_SRC_DIR}/sargs/Literal.cc"
-        "${ORC_SOURCE_SRC_DIR}/sargs/PredicateLeaf.cc"
-        "${ORC_SOURCE_SRC_DIR}/sargs/SargsApplier.cc"
-        "${ORC_SOURCE_SRC_DIR}/sargs/SearchArgument.cc"
-        "${ORC_SOURCE_SRC_DIR}/sargs/TruthValue.cc"
-        "${ORC_SOURCE_SRC_DIR}/Exceptions.cc"
-        "${ORC_SOURCE_SRC_DIR}/OrcFile.cc"
-        "${ORC_SOURCE_SRC_DIR}/Reader.cc"
+        "${ORC_ADDITION_SOURCE_DIR}/orc_proto.pb.cc"
+        "${ORC_SOURCE_SRC_DIR}/Adaptor.cc"
+        "${ORC_SOURCE_SRC_DIR}/Adaptor.hh.in"
+        "${ORC_SOURCE_SRC_DIR}/BlockBuffer.cc"
+        "${ORC_SOURCE_SRC_DIR}/BlockBuffer.hh"
+        "${ORC_SOURCE_SRC_DIR}/BloomFilter.cc"
+        "${ORC_SOURCE_SRC_DIR}/BloomFilter.hh"
+        "${ORC_SOURCE_SRC_DIR}/Bpacking.hh"
+        "${ORC_SOURCE_SRC_DIR}/BpackingDefault.cc"
+        "${ORC_SOURCE_SRC_DIR}/BpackingDefault.hh"
         "${ORC_SOURCE_SRC_DIR}/ByteRLE.cc"
+        "${ORC_SOURCE_SRC_DIR}/ByteRLE.hh"
+        "${ORC_SOURCE_SRC_DIR}/CMakeLists.txt"
         "${ORC_SOURCE_SRC_DIR}/ColumnPrinter.cc"
         "${ORC_SOURCE_SRC_DIR}/ColumnReader.cc"
+        "${ORC_SOURCE_SRC_DIR}/ColumnReader.hh"
        "${ORC_SOURCE_SRC_DIR}/ColumnWriter.cc"
+        "${ORC_SOURCE_SRC_DIR}/ColumnWriter.hh"
        "${ORC_SOURCE_SRC_DIR}/Common.cc"
        "${ORC_SOURCE_SRC_DIR}/Compression.cc"
+        "${ORC_SOURCE_SRC_DIR}/Compression.hh"
+        "${ORC_SOURCE_SRC_DIR}/ConvertColumnReader.cc"
+        "${ORC_SOURCE_SRC_DIR}/ConvertColumnReader.hh"
+        "${ORC_SOURCE_SRC_DIR}/CpuInfoUtil.cc"
+        "${ORC_SOURCE_SRC_DIR}/CpuInfoUtil.hh"
+        "${ORC_SOURCE_SRC_DIR}/Dispatch.hh"
+        "${ORC_SOURCE_SRC_DIR}/Exceptions.cc"
        "${ORC_SOURCE_SRC_DIR}/Int128.cc"
        "${ORC_SOURCE_SRC_DIR}/LzoDecompressor.cc"
+        "${ORC_SOURCE_SRC_DIR}/LzoDecompressor.hh"
        "${ORC_SOURCE_SRC_DIR}/MemoryPool.cc"
+        "${ORC_SOURCE_SRC_DIR}/Murmur3.cc"
+        "${ORC_SOURCE_SRC_DIR}/Murmur3.hh"
+        "${ORC_SOURCE_SRC_DIR}/Options.hh"
+        "${ORC_SOURCE_SRC_DIR}/OrcFile.cc"
        "${ORC_SOURCE_SRC_DIR}/RLE.cc"
+        "${ORC_SOURCE_SRC_DIR}/RLE.hh"
+        "${ORC_SOURCE_SRC_DIR}/RLEV2Util.cc"
+        "${ORC_SOURCE_SRC_DIR}/RLEV2Util.hh"
        "${ORC_SOURCE_SRC_DIR}/RLEv1.cc"
+        "${ORC_SOURCE_SRC_DIR}/RLEv1.hh"
+        "${ORC_SOURCE_SRC_DIR}/RLEv2.hh"
+        "${ORC_SOURCE_SRC_DIR}/Reader.cc"
+        "${ORC_SOURCE_SRC_DIR}/Reader.hh"
        "${ORC_SOURCE_SRC_DIR}/RleDecoderV2.cc"
        "${ORC_SOURCE_SRC_DIR}/RleEncoderV2.cc"
-        "${ORC_SOURCE_SRC_DIR}/RLEV2Util.cc"
+        "${ORC_SOURCE_SRC_DIR}/SchemaEvolution.cc"
+        "${ORC_SOURCE_SRC_DIR}/SchemaEvolution.hh"
        "${ORC_SOURCE_SRC_DIR}/Statistics.cc"
+        "${ORC_SOURCE_SRC_DIR}/Statistics.hh"
        "${ORC_SOURCE_SRC_DIR}/StripeStream.cc"
+        "${ORC_SOURCE_SRC_DIR}/StripeStream.hh"
        "${ORC_SOURCE_SRC_DIR}/Timezone.cc"
+        "${ORC_SOURCE_SRC_DIR}/Timezone.hh"
        "${ORC_SOURCE_SRC_DIR}/TypeImpl.cc"
+        "${ORC_SOURCE_SRC_DIR}/TypeImpl.hh"
+        "${ORC_SOURCE_SRC_DIR}/Utils.hh"
        "${ORC_SOURCE_SRC_DIR}/Vector.cc"
        "${ORC_SOURCE_SRC_DIR}/Writer.cc"
-        "${ORC_SOURCE_SRC_DIR}/Adaptor.cc"
-        "${ORC_SOURCE_SRC_DIR}/BloomFilter.cc"
-        "${ORC_SOURCE_SRC_DIR}/Murmur3.cc"
-        "${ORC_SOURCE_SRC_DIR}/BlockBuffer.cc"
-        "${ORC_SOURCE_SRC_DIR}/wrap/orc-proto-wrapper.cc"
        "${ORC_SOURCE_SRC_DIR}/io/InputStream.cc"
+        "${ORC_SOURCE_SRC_DIR}/io/InputStream.hh"
        "${ORC_SOURCE_SRC_DIR}/io/OutputStream.cc"
-        "${ORC_ADDITION_SOURCE_DIR}/orc_proto.pb.cc"
+        "${ORC_SOURCE_SRC_DIR}/io/OutputStream.hh"
+        "${ORC_SOURCE_SRC_DIR}/sargs/ExpressionTree.cc"
+        "${ORC_SOURCE_SRC_DIR}/sargs/ExpressionTree.hh"
+        "${ORC_SOURCE_SRC_DIR}/sargs/Literal.cc"
+        "${ORC_SOURCE_SRC_DIR}/sargs/PredicateLeaf.cc"
+        "${ORC_SOURCE_SRC_DIR}/sargs/PredicateLeaf.hh"
+        "${ORC_SOURCE_SRC_DIR}/sargs/SargsApplier.cc"
+        "${ORC_SOURCE_SRC_DIR}/sargs/SargsApplier.hh"
+        "${ORC_SOURCE_SRC_DIR}/sargs/SearchArgument.cc"
+        "${ORC_SOURCE_SRC_DIR}/sargs/SearchArgument.hh"
+        "${ORC_SOURCE_SRC_DIR}/sargs/TruthValue.cc"
 )
 
 add_library(_orc ${ORC_SRCS})
@@ -466,9 +502,10 @@ target_include_directories(_parquet SYSTEM BEFORE
     "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src"
     "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src")
 target_link_libraries(_parquet
-    PUBLIC _arrow
-    PRIVATE
+    PUBLIC
+        _arrow
         ch_contrib::thrift
+    PRIVATE
         boost::headers_only
         boost::regex
         OpenSSL::Crypto OpenSSL::SSL)
@@ -478,6 +515,10 @@ if (SANITIZE STREQUAL "undefined")
     target_compile_options(_arrow PRIVATE -fno-sanitize=undefined)
 endif ()
 
+# Define Thrift version for parquet (we use 0.16.0)
+add_definitions(-DPARQUET_THRIFT_VERSION_MAJOR=0)
+add_definitions(-DPARQUET_THRIFT_VERSION_MINOR=16)
+
 # === tools
 
 set(TOOLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/tools/parquet")
@@ -1,6 +1,6 @@
 option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES})
 
-if (NOT ENABLE_AZURE_BLOB_STORAGE OR BUILD_STANDALONE_KEEPER OR OS_FREEBSD OR (NOT ARCH_AMD64))
+if (NOT ENABLE_AZURE_BLOB_STORAGE OR OS_FREEBSD)
     message(STATUS "Not using Azure blob storage")
     return()
 endif()
contrib/cctz (vendored): 2 changes
@@ -1 +1 @@
-Subproject commit 5e05432420f9692418e2e12aff09859e420b14a2
+Subproject commit 8529bcef5cd996b7c0f4d7475286b76b5d126c4c
@@ -1,4 +1,3 @@
-include(${ClickHouse_SOURCE_DIR}/cmake/embed_binary.cmake)
 set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/cctz")
 
 set (SRCS
@@ -23,12 +22,10 @@ if (OS_FREEBSD)
 endif ()
 
 # Related to time_zones table:
-# StorageSystemTimeZones.generated.cpp is autogenerated each time during a build
-# data in this file will be used to populate the system.time_zones table, this is specific to OS_LINUX
-# as the library that's built using embedded tzdata is also specific to OS_LINUX
-set(SYSTEM_STORAGE_TZ_FILE "${PROJECT_BINARY_DIR}/src/Storages/System/StorageSystemTimeZones.generated.cpp")
+# TimeZones.generated.cpp is autogenerated each time during a build
+set(TIMEZONES_FILE "${CMAKE_CURRENT_BINARY_DIR}/TimeZones.generated.cpp")
 # remove existing copies so that its generated fresh on each build.
-file(REMOVE ${SYSTEM_STORAGE_TZ_FILE})
+file(REMOVE ${TIMEZONES_FILE})
 
 # get the list of timezones from tzdata shipped with cctz
 set(TZDIR "${LIBRARY_DIR}/testdata/zoneinfo")
@@ -36,28 +33,44 @@ file(STRINGS "${LIBRARY_DIR}/testdata/version" TZDATA_VERSION)
 set_property(GLOBAL PROPERTY TZDATA_VERSION_PROP "${TZDATA_VERSION}")
 message(STATUS "Packaging with tzdata version: ${TZDATA_VERSION}")
 
-set(TIMEZONE_RESOURCE_FILES)
-
 # each file in that dir (except of tab and localtime) store the info about timezone
 execute_process(COMMAND
     bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | LC_ALL=C sort | paste -sd ';' -"
     OUTPUT_STRIP_TRAILING_WHITESPACE
     OUTPUT_VARIABLE TIMEZONES)
 
-file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n")
-file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "const char * auto_time_zones[] {\n" )
+file(APPEND ${TIMEZONES_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n")
+file(APPEND ${TIMEZONES_FILE} "#include <incbin.h>\n")
+
+set (COUNTER 1)
+foreach(TIMEZONE ${TIMEZONES})
+    file(APPEND ${TIMEZONES_FILE} "INCBIN(resource_timezone${COUNTER}, \"${TZDIR}/${TIMEZONE}\");\n")
+    MATH(EXPR COUNTER "${COUNTER}+1")
+endforeach(TIMEZONE)
+
+file(APPEND ${TIMEZONES_FILE} "const char * auto_time_zones[] {\n" )
 
 foreach(TIMEZONE ${TIMEZONES})
-    file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "    \"${TIMEZONE}\",\n")
-    list(APPEND TIMEZONE_RESOURCE_FILES "${TIMEZONE}")
+    file(APPEND ${TIMEZONES_FILE} "    \"${TIMEZONE}\",\n")
+    MATH(EXPR COUNTER "${COUNTER}+1")
 endforeach(TIMEZONE)
-file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "    nullptr};\n")
-clickhouse_embed_binaries(
-    TARGET tzdata
-    RESOURCE_DIR "${TZDIR}"
-    RESOURCES ${TIMEZONE_RESOURCE_FILES}
-)
-add_dependencies(_cctz tzdata)
-target_link_libraries(_cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:tzdata> -Wl,${NO_WHOLE_ARCHIVE}")
+
+file(APPEND ${TIMEZONES_FILE} "    nullptr\n};\n\n")
+
+file(APPEND ${TIMEZONES_FILE} "#include <string_view>\n\n")
+file(APPEND ${TIMEZONES_FILE} "std::string_view getTimeZone(const char * name)\n{\n" )
+
+set (COUNTER 1)
+foreach(TIMEZONE ${TIMEZONES})
+    file(APPEND ${TIMEZONES_FILE} "    if (std::string_view(\"${TIMEZONE}\") == name) return { reinterpret_cast<const char *>(gresource_timezone${COUNTER}Data), gresource_timezone${COUNTER}Size };\n")
+    MATH(EXPR COUNTER "${COUNTER}+1")
+endforeach(TIMEZONE)
+
+file(APPEND ${TIMEZONES_FILE} "    return {};\n")
+file(APPEND ${TIMEZONES_FILE} "}\n")
+
+add_library (tzdata ${TIMEZONES_FILE})
+target_link_libraries(tzdata ch_contrib::incbin)
+target_link_libraries(_cctz tzdata)
 
 add_library(ch_contrib::cctz ALIAS _cctz)
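The generated TimeZones.generated.cpp is ordinary C++. Reduced to a single zone, the output of the loops above looks roughly like this; the zoneinfo path is illustrative:

// Sketch of the generated file, reduced to one timezone.
#include <string_view>
#include <incbin.h>

INCBIN(resource_timezone1, "/path/to/zoneinfo/UTC"); // defines gresource_timezone1Data/Size

const char * auto_time_zones[] {
    "UTC",
    nullptr
};

std::string_view getTimeZone(const char * name)
{
    if (std::string_view("UTC") == name)
        return { reinterpret_cast<const char *>(gresource_timezone1Data), gresource_timezone1Size };
    return {};
}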
@@ -61,11 +61,24 @@ namespace CityHash_v1_0_2
 typedef uint8_t uint8;
 typedef uint32_t uint32;
 typedef uint64_t uint64;
-typedef std::pair<uint64, uint64> uint128;

-inline uint64 Uint128Low64(const uint128& x) { return x.first; }
-inline uint64 Uint128High64(const uint128& x) { return x.second; }
+/// Represent an unsigned integer of 128 bits as it's used in CityHash.
+/// Originally CityHash used `std::pair<uint64, uint64>` instead of this struct,
+/// however the members `first` and `second` could be easily confused so they were renamed to `low64` and `high64`:
+/// `first` -> `low64`, `second` -> `high64`.
+struct uint128
+{
+    uint64 low64 = 0;
+    uint64 high64 = 0;
+
+    uint128() = default;
+    uint128(uint64 low64_, uint64 high64_) : low64(low64_), high64(high64_) {}
+
+    friend bool operator ==(const uint128 & x, const uint128 & y) { return (x.low64 == y.low64) && (x.high64 == y.high64); }
+    friend bool operator !=(const uint128 & x, const uint128 & y) { return !(x == y); }
+};
+
+inline uint64 Uint128Low64(const uint128 & x) { return x.low64; }
+inline uint64 Uint128High64(const uint128 & x) { return x.high64; }

 // Hash function for a byte array.
 uint64 CityHash64(const char *buf, size_t len);
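A hedged sketch of what the rename means for callers (it assumes the vendored header above is included as city.h and that CityHash128 has its usual CityHash signature; the snippet is illustrative, not part of the diff):

#include <city.h>

void example()
{
    const char buf[] = "example";
    CityHash_v1_0_2::uint128 h = CityHash_v1_0_2::CityHash128(buf, sizeof(buf) - 1);
    // Before this change the halves were h.first / h.second, which were easy
    // to mix up; now the intent is explicit:
    CityHash_v1_0_2::uint64 lo = h.low64;   // same value as Uint128Low64(h)
    CityHash_v1_0_2::uint64 hi = h.high64;  // same value as Uint128High64(h)
    (void)lo; (void)hi;
}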
contrib/hashidsxx (vendored submodule, removed)
@@ -1 +0,0 @@
-Subproject commit 783f6911ccfdaca83e3cfac084c4aad888a80cee
@@ -1,14 +0,0 @@
-set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/hashidsxx")
-
-set (SRCS
-"${LIBRARY_DIR}/hashids.cpp"
-)
-
-set (HDRS
-"${LIBRARY_DIR}/hashids.h"
-)
-
-add_library(_hashidsxx ${SRCS} ${HDRS})
-target_include_directories(_hashidsxx SYSTEM PUBLIC "${LIBRARY_DIR}")
-
-add_library(ch_contrib::hashidsxx ALIAS _hashidsxx)
contrib/idxd-config (vendored submodule)
@@ -1 +1 @@
-Subproject commit f6605c41a735e3fdfef2d2d18655a33af6490b99
+Subproject commit a836ce0e42052a69bffbbc14239ab4097f3b77f1
contrib/incbin (new vendored submodule)
@@ -0,0 +1 @@
+Subproject commit 6e576cae5ab5810f25e2631f2e0b80cbe7dc8cbf
contrib/incbin-cmake/CMakeLists.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
+set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/incbin")
+add_library(_incbin INTERFACE)
+target_include_directories(_incbin SYSTEM INTERFACE ${LIBRARY_DIR})
+add_library(ch_contrib::incbin ALIAS _incbin)
+
+# Warning "incbin is incompatible with bitcode. Using the library will break upload to App Store if you have bitcode enabled.
+# Add `#define INCBIN_SILENCE_BITCODE_WARNING` before including this header to silence this warning."
+target_compile_definitions(_incbin INTERFACE INCBIN_SILENCE_BITCODE_WARNING)
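As a quick illustration of the library being wired up here: incbin is a single header whose INCBIN(token, path) macro embeds a file's bytes into the binary at compile time and exposes g<token>Data / g<token>End / g<token>Size symbols. The file name below is hypothetical; the symbol naming matches the gresource_timezone*Data/Size symbols consumed by the cctz script above.

#include <incbin.h>
#include <string_view>

// Generates gresource_exampleData, gresource_exampleEnd and gresource_exampleSize
// for the bytes of example.txt.
INCBIN(resource_example, "example.txt");

std::string_view exampleContents()
{
    return { reinterpret_cast<const char *>(gresource_exampleData), gresource_exampleSize };
}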
@@ -1,5 +1,5 @@
 if (SANITIZE OR NOT (
-((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_AARCH64 OR ARCH_PPC64LE OR ARCH_RISCV64)) OR
+((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_AARCH64 OR ARCH_PPC64LE OR ARCH_RISCV64 OR ARCH_S390X)) OR
 (OS_DARWIN AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" OR CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
 ))
 if (ENABLE_JEMALLOC)
@@ -17,17 +17,17 @@ if (NOT ENABLE_JEMALLOC)
 endif ()

 if (NOT OS_LINUX)
-message (WARNING "jemalloc support on non-linux is EXPERIMENTAL")
+message (WARNING "jemalloc support on non-Linux is EXPERIMENTAL")
 endif()

 if (OS_LINUX)
-# ThreadPool select job randomly, and there can be some threads that had been
-# performed some memory heavy task before and will be inactive for some time,
-# but until it will became active again, the memory will not be freed since by
-# default each thread has it's own arena, but there should be not more then
+# ThreadPool select job randomly, and there can be some threads that have been
+# performed some memory-heavy tasks before and will be inactive for some time,
+# but until it becomes active again, the memory will not be freed since, by
+# default, each thread has its arena, but there should be no more than
 # 4*CPU arenas (see opt.nareans description).
 #
-# By enabling percpu_arena number of arenas limited to number of CPUs and hence
+# By enabling percpu_arena number of arenas is limited to the number of CPUs, and hence
 # this problem should go away.
 #
 # muzzy_decay_ms -- use MADV_FREE when available on newer Linuxes, to
@@ -38,7 +38,7 @@ if (OS_LINUX)
 else()
 set (JEMALLOC_CONFIG_MALLOC_CONF "oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000")
 endif()
-# CACHE variable is empty, to allow changing defaults without necessity
+# CACHE variable is empty to allow changing defaults without the necessity
 # to purge cache
 set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE "" CACHE STRING "Change default configuration string of JEMalloc" )
 if (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE)
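For reference, the arena options these comments describe are observable at run time through jemalloc's mallctl interface. A minimal sketch (the opt.* and config.* names come from jemalloc's documented mallctl namespace; this is illustrative, not ClickHouse code):

#include <cstddef>
#include <cstdio>
#include <jemalloc/jemalloc.h>

int main()
{
    unsigned narenas = 0;
    size_t len = sizeof(narenas);
    // Number of arenas; with percpu_arena enabled it is bounded by the CPU count.
    if (mallctl("opt.narenas", &narenas, &len, nullptr, 0) == 0)
        std::printf("opt.narenas = %u\n", narenas);

    const char * conf = nullptr;
    len = sizeof(conf);
    // The compiled-in configuration string set via JEMALLOC_CONFIG_MALLOC_CONF above.
    if (mallctl("config.malloc_conf", &conf, &len, nullptr, 0) == 0)
        std::printf("config.malloc_conf = %s\n", conf);
}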
@@ -148,6 +148,8 @@ elseif (ARCH_PPC64LE)
 set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_ppc64le")
 elseif (ARCH_RISCV64)
 set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_riscv64")
+elseif (ARCH_S390X)
+set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_s390x")
 else ()
 message (FATAL_ERROR "internal jemalloc: This arch is not supported")
 endif ()
@@ -170,16 +172,13 @@ endif ()

 target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_PROF=1)

-if (USE_UNWIND)
-# jemalloc provides support for two different libunwind flavors: the original HP libunwind and the one coming with gcc / g++ / libstdc++.
-# The latter is identified by `JEMALLOC_PROF_LIBGCC` and uses `_Unwind_Backtrace` method instead of `unw_backtrace`.
-# At the time ClickHouse uses LLVM libunwind which follows libgcc's way of backtracing.
-# ClickHouse has to provide `unw_backtrace` method by the means of [commit 8e2b31e](https://github.com/ClickHouse/libunwind/commit/8e2b31e766dd502f6df74909e04a7dbdf5182eb1).
-target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBGCC=1)
-target_link_libraries (_jemalloc PRIVATE unwind)
-endif ()
+# jemalloc provides support for two different libunwind flavors: the original HP libunwind and the one coming with gcc / g++ / libstdc++.
+# The latter is identified by `JEMALLOC_PROF_LIBGCC` and uses `_Unwind_Backtrace` method instead of `unw_backtrace`.
+# At the time ClickHouse uses LLVM libunwind which follows libgcc's way of backtracking.
+#
+# ClickHouse has to provide `unw_backtrace` method by the means of [commit 8e2b31e](https://github.com/ClickHouse/libunwind/commit/8e2b31e766dd502f6df74909e04a7dbdf5182eb1).
+target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBGCC=1)
+target_link_libraries (_jemalloc PRIVATE unwind)

 # for RTLD_NEXT
 target_compile_options(_jemalloc PRIVATE -D_GNU_SOURCE)
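For context on the two flavors named in these comments: the libgcc-style interface selected by JEMALLOC_PROF_LIBGCC is _Unwind_Backtrace, which walks stack frames via a callback. A minimal sketch of that interface (illustrative only, not ClickHouse or jemalloc code):

#include <unwind.h>
#include <cstdio>

// Invoked once per stack frame by _Unwind_Backtrace.
static _Unwind_Reason_Code trace(struct _Unwind_Context * ctx, void *)
{
    std::printf("ip = %p\n", reinterpret_cast<void *>(_Unwind_GetIP(ctx)));
    return _URC_NO_REASON;
}

void dump_backtrace()
{
    _Unwind_Backtrace(trace, nullptr);  // the call jemalloc makes instead of unw_backtrace
}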
@@ -0,0 +1,435 @@ (new file; all lines added)
/* include/jemalloc/internal/jemalloc_internal_defs.h. Generated from jemalloc_internal_defs.h.in by configure. */
#ifndef JEMALLOC_INTERNAL_DEFS_H_
#define JEMALLOC_INTERNAL_DEFS_H_
/*
 * If JEMALLOC_PREFIX is defined via --with-jemalloc-prefix, it will cause all
 * public APIs to be prefixed. This makes it possible, with some care, to use
 * multiple allocators simultaneously.
 */
/* #undef JEMALLOC_PREFIX */
/* #undef JEMALLOC_CPREFIX */

/*
 * Define overrides for non-standard allocator-related functions if they are
 * present on the system.
 */
#define JEMALLOC_OVERRIDE___LIBC_CALLOC
#define JEMALLOC_OVERRIDE___LIBC_FREE
#define JEMALLOC_OVERRIDE___LIBC_MALLOC
#define JEMALLOC_OVERRIDE___LIBC_MEMALIGN
#define JEMALLOC_OVERRIDE___LIBC_REALLOC
#define JEMALLOC_OVERRIDE___LIBC_VALLOC
#define JEMALLOC_OVERRIDE___LIBC_PVALLOC
/* #undef JEMALLOC_OVERRIDE___POSIX_MEMALIGN */

/*
 * JEMALLOC_PRIVATE_NAMESPACE is used as a prefix for all library-private APIs.
 * For shared libraries, symbol visibility mechanisms prevent these symbols
 * from being exported, but for static libraries, naming collisions are a real
 * possibility.
 */
#define JEMALLOC_PRIVATE_NAMESPACE je_

/*
 * Hyper-threaded CPUs may need a special instruction inside spin loops in
 * order to yield to another virtual CPU.
 */
#define CPU_SPINWAIT
/* 1 if CPU_SPINWAIT is defined, 0 otherwise. */
#define HAVE_CPU_SPINWAIT 0

/*
 * Number of significant bits in virtual addresses. This may be less than the
 * total number of bits in a pointer, e.g. on x64, for which the uppermost 16
 * bits are the same as bit 47.
 */
#define LG_VADDR 64

/* Defined if C11 atomics are available. */
#define JEMALLOC_C11_ATOMICS

/* Defined if GCC __atomic atomics are available. */
#define JEMALLOC_GCC_ATOMIC_ATOMICS
/* and the 8-bit variant support. */
#define JEMALLOC_GCC_U8_ATOMIC_ATOMICS

/* Defined if GCC __sync atomics are available. */
#define JEMALLOC_GCC_SYNC_ATOMICS
/* and the 8-bit variant support. */
#define JEMALLOC_GCC_U8_SYNC_ATOMICS

/*
 * Defined if __builtin_clz() and __builtin_clzl() are available.
 */
#define JEMALLOC_HAVE_BUILTIN_CLZ

/*
 * Defined if os_unfair_lock_*() functions are available, as provided by Darwin.
 */
/* #undef JEMALLOC_OS_UNFAIR_LOCK */

/* Defined if syscall(2) is usable. */
#define JEMALLOC_USE_SYSCALL

/*
 * Defined if secure_getenv(3) is available.
 */
#define JEMALLOC_HAVE_SECURE_GETENV

/*
 * Defined if issetugid(2) is available.
 */
/* #undef JEMALLOC_HAVE_ISSETUGID */

/* Defined if pthread_atfork(3) is available. */
#define JEMALLOC_HAVE_PTHREAD_ATFORK

/* Defined if pthread_setname_np(3) is available. */
#define JEMALLOC_HAVE_PTHREAD_SETNAME_NP

/* Defined if pthread_getname_np(3) is available. */
#define JEMALLOC_HAVE_PTHREAD_GETNAME_NP

/* Defined if pthread_get_name_np(3) is available. */
/* #undef JEMALLOC_HAVE_PTHREAD_GET_NAME_NP */

/*
 * Defined if clock_gettime(CLOCK_MONOTONIC_COARSE, ...) is available.
 */
#define JEMALLOC_HAVE_CLOCK_MONOTONIC_COARSE

/*
 * Defined if clock_gettime(CLOCK_MONOTONIC, ...) is available.
 */
#define JEMALLOC_HAVE_CLOCK_MONOTONIC

/*
 * Defined if mach_absolute_time() is available.
 */
/* #undef JEMALLOC_HAVE_MACH_ABSOLUTE_TIME */

/*
 * Defined if clock_gettime(CLOCK_REALTIME, ...) is available.
 */
#define JEMALLOC_HAVE_CLOCK_REALTIME

/*
 * Defined if _malloc_thread_cleanup() exists. At least in the case of
 * FreeBSD, pthread_key_create() allocates, which if used during malloc
 * bootstrapping will cause recursion into the pthreads library. Therefore, if
 * _malloc_thread_cleanup() exists, use it as the basis for thread cleanup in
 * malloc_tsd.
 */
/* #undef JEMALLOC_MALLOC_THREAD_CLEANUP */

/*
 * Defined if threaded initialization is known to be safe on this platform.
 * Among other things, it must be possible to initialize a mutex without
 * triggering allocation in order for threaded allocation to be safe.
 */
#define JEMALLOC_THREADED_INIT

/*
 * Defined if the pthreads implementation defines
 * _pthread_mutex_init_calloc_cb(), in which case the function is used in order
 * to avoid recursive allocation during mutex initialization.
 */
/* #undef JEMALLOC_MUTEX_INIT_CB */

/* Non-empty if the tls_model attribute is supported. */
#define JEMALLOC_TLS_MODEL __attribute__((tls_model("initial-exec")))

/*
 * JEMALLOC_DEBUG enables assertions and other sanity checks, and disables
 * inline functions.
 */
/* #undef JEMALLOC_DEBUG */

/* JEMALLOC_STATS enables statistics calculation. */
#define JEMALLOC_STATS

/* JEMALLOC_EXPERIMENTAL_SMALLOCX_API enables experimental smallocx API. */
/* #undef JEMALLOC_EXPERIMENTAL_SMALLOCX_API */

/* JEMALLOC_PROF enables allocation profiling. */
/* #undef JEMALLOC_PROF */

/* Use libunwind for profile backtracing if defined. */
/* #undef JEMALLOC_PROF_LIBUNWIND */

/* Use libgcc for profile backtracing if defined. */
/* #undef JEMALLOC_PROF_LIBGCC */

/* Use gcc intrinsics for profile backtracing if defined. */
/* #undef JEMALLOC_PROF_GCC */

/* JEMALLOC_PAGEID enabled page id */
/* #undef JEMALLOC_PAGEID */

/* JEMALLOC_HAVE_PRCTL checks prctl */
#define JEMALLOC_HAVE_PRCTL

/*
 * JEMALLOC_DSS enables use of sbrk(2) to allocate extents from the data storage
 * segment (DSS).
 */
#define JEMALLOC_DSS

/* Support memory filling (junk/zero). */
#define JEMALLOC_FILL

/* Support utrace(2)-based tracing. */
/* #undef JEMALLOC_UTRACE */

/* Support utrace(2)-based tracing (label based signature). */
/* #undef JEMALLOC_UTRACE_LABEL */

/* Support optional abort() on OOM. */
/* #undef JEMALLOC_XMALLOC */

/* Support lazy locking (avoid locking unless a second thread is launched). */
/* #undef JEMALLOC_LAZY_LOCK */

/*
 * Minimum allocation alignment is 2^LG_QUANTUM bytes (ignoring tiny size
 * classes).
 */
/* #undef LG_QUANTUM */

/* One page is 2^LG_PAGE bytes. */
#define LG_PAGE 12

/* Maximum number of regions in a slab. */
/* #undef CONFIG_LG_SLAB_MAXREGS */

/*
 * One huge page is 2^LG_HUGEPAGE bytes. Note that this is defined even if the
 * system does not explicitly support huge pages; system calls that require
 * explicit huge page support are separately configured.
 */
#define LG_HUGEPAGE 20

/*
 * If defined, adjacent virtual memory mappings with identical attributes
 * automatically coalesce, and they fragment when changes are made to subranges.
 * This is the normal order of things for mmap()/munmap(), but on Windows
 * VirtualAlloc()/VirtualFree() operations must be precisely matched, i.e.
 * mappings do *not* coalesce/fragment.
 */
#define JEMALLOC_MAPS_COALESCE

/*
 * If defined, retain memory for later reuse by default rather than using e.g.
 * munmap() to unmap freed extents. This is enabled on 64-bit Linux because
 * common sequences of mmap()/munmap() calls will cause virtual memory map
 * holes.
 */
#define JEMALLOC_RETAIN

/* TLS is used to map arenas and magazine caches to threads. */
#define JEMALLOC_TLS

/*
 * Used to mark unreachable code to quiet "end of non-void" compiler warnings.
 * Don't use this directly; instead use unreachable() from util.h
 */
#define JEMALLOC_INTERNAL_UNREACHABLE __builtin_unreachable

/*
 * ffs*() functions to use for bitmapping. Don't use these directly; instead,
 * use ffs_*() from util.h.
 */
#define JEMALLOC_INTERNAL_FFSLL __builtin_ffsll
#define JEMALLOC_INTERNAL_FFSL __builtin_ffsl
#define JEMALLOC_INTERNAL_FFS __builtin_ffs

/*
 * popcount*() functions to use for bitmapping.
 */
#define JEMALLOC_INTERNAL_POPCOUNTL __builtin_popcountl
#define JEMALLOC_INTERNAL_POPCOUNT __builtin_popcount

/*
 * If defined, explicitly attempt to more uniformly distribute large allocation
 * pointer alignments across all cache indices.
 */
#define JEMALLOC_CACHE_OBLIVIOUS

/*
 * If defined, enable logging facilities. We make this a configure option to
 * avoid taking extra branches everywhere.
 */
/* #undef JEMALLOC_LOG */

/*
 * If defined, use readlinkat() (instead of readlink()) to follow
 * /etc/malloc_conf.
 */
/* #undef JEMALLOC_READLINKAT */

/*
 * Darwin (OS X) uses zones to work around Mach-O symbol override shortcomings.
 */
/* #undef JEMALLOC_ZONE */

/*
 * Methods for determining whether the OS overcommits.
 * JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY: Linux's
 * /proc/sys/vm.overcommit_memory file.
 * JEMALLOC_SYSCTL_VM_OVERCOMMIT: FreeBSD's vm.overcommit sysctl.
 */
/* #undef JEMALLOC_SYSCTL_VM_OVERCOMMIT */
#define JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY

/* Defined if madvise(2) is available. */
#define JEMALLOC_HAVE_MADVISE

/*
 * Defined if transparent huge pages are supported via the MADV_[NO]HUGEPAGE
 * arguments to madvise(2).
 */
#define JEMALLOC_HAVE_MADVISE_HUGE

/*
 * Methods for purging unused pages differ between operating systems.
 *
 * madvise(..., MADV_FREE) : This marks pages as being unused, such that they
 *                           will be discarded rather than swapped out.
 * madvise(..., MADV_DONTNEED) : If JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS is
 *                               defined, this immediately discards pages,
 *                               such that new pages will be demand-zeroed if
 *                               the address region is later touched;
 *                               otherwise this behaves similarly to
 *                               MADV_FREE, though typically with higher
 *                               system overhead.
 */
#define JEMALLOC_PURGE_MADVISE_FREE
#define JEMALLOC_PURGE_MADVISE_DONTNEED
#define JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS

/* Defined if madvise(2) is available but MADV_FREE is not (x86 Linux only). */
/* #undef JEMALLOC_DEFINE_MADVISE_FREE */

/*
 * Defined if MADV_DO[NT]DUMP is supported as an argument to madvise.
 */
#define JEMALLOC_MADVISE_DONTDUMP

/*
 * Defined if MADV_[NO]CORE is supported as an argument to madvise.
 */
/* #undef JEMALLOC_MADVISE_NOCORE */

/* Defined if mprotect(2) is available. */
#define JEMALLOC_HAVE_MPROTECT

/*
 * Defined if transparent huge pages (THPs) are supported via the
 * MADV_[NO]HUGEPAGE arguments to madvise(2), and THP support is enabled.
 */
/* #undef JEMALLOC_THP */

/* Defined if posix_madvise is available. */
/* #undef JEMALLOC_HAVE_POSIX_MADVISE */

/*
 * Method for purging unused pages using posix_madvise.
 *
 * posix_madvise(..., POSIX_MADV_DONTNEED)
 */
/* #undef JEMALLOC_PURGE_POSIX_MADVISE_DONTNEED */
/* #undef JEMALLOC_PURGE_POSIX_MADVISE_DONTNEED_ZEROS */

/*
 * Defined if memcntl page admin call is supported
 */
/* #undef JEMALLOC_HAVE_MEMCNTL */

/*
 * Defined if malloc_size is supported
 */
/* #undef JEMALLOC_HAVE_MALLOC_SIZE */

/* Define if operating system has alloca.h header. */
#define JEMALLOC_HAS_ALLOCA_H

/* C99 restrict keyword supported. */
#define JEMALLOC_HAS_RESTRICT

/* For use by hash code. */
#define JEMALLOC_BIG_ENDIAN

/* sizeof(int) == 2^LG_SIZEOF_INT. */
#define LG_SIZEOF_INT 2

/* sizeof(long) == 2^LG_SIZEOF_LONG. */
#define LG_SIZEOF_LONG 3

/* sizeof(long long) == 2^LG_SIZEOF_LONG_LONG. */
#define LG_SIZEOF_LONG_LONG 3

/* sizeof(intmax_t) == 2^LG_SIZEOF_INTMAX_T. */
#define LG_SIZEOF_INTMAX_T 3

/* glibc malloc hooks (__malloc_hook, __realloc_hook, __free_hook). */
/* #undef JEMALLOC_GLIBC_MALLOC_HOOK */

/* glibc memalign hook. */
/* #undef JEMALLOC_GLIBC_MEMALIGN_HOOK */

/* pthread support */
#define JEMALLOC_HAVE_PTHREAD

/* dlsym() support */
#define JEMALLOC_HAVE_DLSYM

/* Adaptive mutex support in pthreads. */
#define JEMALLOC_HAVE_PTHREAD_MUTEX_ADAPTIVE_NP

/* GNU specific sched_getcpu support */
#define JEMALLOC_HAVE_SCHED_GETCPU

/* GNU specific sched_setaffinity support */
#define JEMALLOC_HAVE_SCHED_SETAFFINITY

/*
 * If defined, all the features necessary for background threads are present.
 */
#define JEMALLOC_BACKGROUND_THREAD

/*
 * If defined, jemalloc symbols are not exported (doesn't work when
 * JEMALLOC_PREFIX is not defined).
 */
/* #undef JEMALLOC_EXPORT */

/* config.malloc_conf options string. */
#define JEMALLOC_CONFIG_MALLOC_CONF ""

/* If defined, jemalloc takes the malloc/free/etc. symbol names. */
#define JEMALLOC_IS_MALLOC

/*
 * Defined if strerror_r returns char * if _GNU_SOURCE is defined.
 */
#define JEMALLOC_STRERROR_R_RETURNS_CHAR_WITH_GNU_SOURCE

/* Performs additional safety checks when defined. */
/* #undef JEMALLOC_OPT_SAFETY_CHECKS */

/* Is C++ support being built? */
#define JEMALLOC_ENABLE_CXX

/* Performs additional size checks when defined. */
/* #undef JEMALLOC_OPT_SIZE_CHECKS */

/* Allows sampled junk and stash for checking use-after-free when defined. */
/* #undef JEMALLOC_UAF_DETECTION */

/* Darwin VM_MAKE_TAG support */
/* #undef JEMALLOC_HAVE_VM_MAKE_TAG */

/* If defined, realloc(ptr, 0) defaults to "free" instead of "alloc". */
#define JEMALLOC_ZERO_REALLOC_DEFAULT_FREE

#endif /* JEMALLOC_INTERNAL_DEFS_H_ */
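A standalone sketch of the LG_* convention this header uses (every size is stored as its base-2 logarithm; the constants below mirror the s390x values above and are not part of the header itself):

#include <cstddef>

constexpr int lg_sizeof_int = 2;   // #define LG_SIZEOF_INT 2
constexpr int lg_sizeof_long = 3;  // #define LG_SIZEOF_LONG 3
constexpr int lg_page = 12;        // #define LG_PAGE 12

static_assert(sizeof(int) == (1 << lg_sizeof_int), "sizeof(int) == 2^LG_SIZEOF_INT");
static_assert(sizeof(long) == (1 << lg_sizeof_long), "sizeof(long) == 2^LG_SIZEOF_LONG on a 64-bit target");
constexpr std::size_t page_size = std::size_t{1} << lg_page;  // 4096-byte pages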
@@ -61,9 +61,7 @@ target_include_directories(cxx SYSTEM BEFORE PUBLIC $<$<COMPILE_LANGUAGE:CXX>:$
 target_compile_definitions(cxx PRIVATE -D_LIBCPP_BUILDING_LIBRARY -DLIBCXX_BUILDING_LIBCXXABI)

 # Enable capturing stack traces for all exceptions.
-if (USE_UNWIND)
-target_compile_definitions(cxx PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)
-endif ()
+target_compile_definitions(cxx PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)

 if (USE_MUSL)
 target_compile_definitions(cxx PUBLIC -D_LIBCPP_HAS_MUSL_LIBC=1)
@@ -35,12 +35,10 @@ target_include_directories(cxxabi SYSTEM BEFORE
 )
 target_compile_definitions(cxxabi PRIVATE -D_LIBCPP_BUILDING_LIBRARY)
 target_compile_options(cxxabi PRIVATE -nostdinc++ -fno-sanitize=undefined -Wno-macro-redefined) # If we don't disable UBSan, infinite recursion happens in dynamic_cast.
-target_link_libraries(cxxabi PUBLIC ${EXCEPTION_HANDLING_LIBRARY})
+target_link_libraries(cxxabi PUBLIC unwind)

 # Enable capturing stack traces for all exceptions.
-if (USE_UNWIND)
-target_compile_definitions(cxxabi PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)
-endif ()
+target_compile_definitions(cxxabi PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)

 install(
 TARGETS cxxabi
contrib/libhdfs3 (vendored submodule)
@@ -1 +1 @@
-Subproject commit 164b89253fad7991bce77882f01b51ab81d19f3d
+Subproject commit 377220ef351ae24994a5fcd2b5fa3930d00c4db0
@@ -1,11 +1,11 @@
-if(NOT ARCH_AARCH64 AND NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE AND NOT ARCH_S390X)
+if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE AND NOT ARCH_S390X)
 option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
 elseif(ENABLE_HDFS)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
 endif()

 if(NOT ENABLE_HDFS)
-message(STATUS "Not using hdfs")
+message(STATUS "Not using HDFS")
 return()
 endif()

@@ -1,15 +0,0 @@
-include(${ClickHouse_SOURCE_DIR}/cmake/embed_binary.cmake)
-
-set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/nlp-data")
-
-add_library (_nlp_data INTERFACE)
-
-clickhouse_embed_binaries(
-TARGET nlp_dictionaries
-RESOURCE_DIR "${LIBRARY_DIR}"
-RESOURCES charset.zst tonality_ru.zst programming.zst
-)
-
-add_dependencies(_nlp_data nlp_dictionaries)
-target_link_libraries(_nlp_data INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:nlp_dictionaries> -Wl,${NO_WHOLE_ARCHIVE}")
-add_library(ch_contrib::nlp_data ALIAS _nlp_data)
contrib/orc (vendored submodule)
@@ -1 +1 @@
-Subproject commit c5d7755ba0b9a95631c8daea4d094101f26ec761
+Subproject commit 568d1d60c250af1890f226c182bc15bd8cc94cf1

contrib/qpl (vendored submodule)
@@ -1 +1 @@
-Subproject commit 3f8f5cea27739f5261e8fd577dc233ffe88bf679
+Subproject commit faaf19350459c076e66bb5df11743c3fade59b73

contrib/re2 (vendored submodule)
@@ -1 +1 @@
-Subproject commit 13ebb377c6ad763ca61d12dd6f88b1126bd0b911
+Subproject commit 03da4fc0857c285e3a26782f6bc8931c4c950df4
@@ -12,6 +12,7 @@ endif()
 set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")

 set(RE2_SOURCES
+${SRC_DIR}/re2/bitmap256.cc
 ${SRC_DIR}/re2/bitstate.cc
 ${SRC_DIR}/re2/compile.cc
 ${SRC_DIR}/re2/dfa.cc
@@ -28,15 +29,16 @@ set(RE2_SOURCES
 ${SRC_DIR}/re2/regexp.cc
 ${SRC_DIR}/re2/set.cc
 ${SRC_DIR}/re2/simplify.cc
-${SRC_DIR}/re2/stringpiece.cc
 ${SRC_DIR}/re2/tostring.cc
 ${SRC_DIR}/re2/unicode_casefold.cc
 ${SRC_DIR}/re2/unicode_groups.cc
+${SRC_DIR}/util/pcre.cc
 ${SRC_DIR}/util/rune.cc
 ${SRC_DIR}/util/strutil.cc
 )
 add_library(re2 ${RE2_SOURCES})
 target_include_directories(re2 PUBLIC "${SRC_DIR}")
+target_link_libraries(re2 ch_contrib::abseil_str_format)

 # Building re2 which is thread-safe and re2_st which is not.
 # re2 changes its state during matching of regular expression, e.g. creates temporary DFA.
@@ -48,6 +50,7 @@ target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st)
 target_include_directories (re2_st PRIVATE .)
 target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
 target_include_directories (re2_st SYSTEM BEFORE PUBLIC ${SRC_DIR})
+target_link_libraries (re2_st ch_contrib::abseil_str_format)

 file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st)
 foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
@@ -60,17 +63,6 @@ foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
 add_dependencies (re2_st transform_${FILENAME})
 endforeach ()

-file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/util)
-foreach (FILENAME mutex.h)
-add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/util/${FILENAME}"
--DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
--P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
-COMMENT "Creating ${FILENAME} for re2_st library.")
-add_custom_target (transform_${FILENAME} DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}")
-add_dependencies (re2_st transform_${FILENAME})
-endforeach ()
-
 # NOTE: you should not change name of library here, since it is used to generate required header (see above)
 add_library(ch_contrib::re2 ALIAS re2)
 add_library(ch_contrib::re2_st ALIAS re2_st)
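Since the comments above distinguish the thread-safe re2 build from the single-threaded re2_st one, here is a hedged sketch of the (shared) matching interface both flavors expose; the strings are illustrative:

#include <re2/re2.h>
#include <string>
#include <cassert>

int main()
{
    // RE2 objects are safe to share across threads; the re2_st flavor trades
    // that away (NO_THREADS, namespace re2_st) for single-threaded use.
    assert(RE2::FullMatch("clickhouse", "click.*"));              // whole-string match
    std::string digits;
    assert(RE2::PartialMatch("version 23.6", "([0-9]+)", &digits));  // substring match with capture
    assert(digits == "23");
}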
@@ -120,11 +120,12 @@
 "docker/test/base": {
 "name": "clickhouse/test-base",
 "dependent": [
-"docker/test/stateless",
-"docker/test/integration/base",
 "docker/test/fuzzer",
+"docker/test/integration/base",
 "docker/test/keeper-jepsen",
-"docker/test/server-jepsen"
+"docker/test/server-jepsen",
+"docker/test/sqllogic",
+"docker/test/stateless"
 ]
 },
 "docker/test/integration/kerberized_hadoop": {
@@ -32,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \
 esac

 ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
-ARG VERSION="23.5.2.7"
+ARG VERSION="23.6.2.18"
 ARG PACKAGES="clickhouse-keeper"

 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -49,8 +49,8 @@ ENV CARGO_HOME=/rust/cargo
 ENV PATH="/rust/cargo/bin:${PATH}"
 RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \
 chmod 777 -R /rust && \
-rustup toolchain install nightly && \
-rustup default nightly && \
+rustup toolchain install nightly-2023-07-04 && \
+rustup default nightly-2023-07-04 && \
 rustup component add rust-src && \
 rustup target add aarch64-unknown-linux-gnu && \
 rustup target add x86_64-apple-darwin && \
@@ -58,6 +58,33 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \
 rustup target add aarch64-apple-darwin && \
 rustup target add powerpc64le-unknown-linux-gnu
+
+# Create vendor cache for cargo.
+#
+# Note, that the config.toml for the root is used, you will not be able to
+# install any other crates, except those which had been vendored (since if
+# there is "replace-with" for some source, then cargo will not look to other
+# remotes except this).
+#
+# Notes for the command itself:
+# - --chown is required to preserve the rights
+# - unstable-options for -C
+# - chmod is required to fix the permissions, since builds are running from a different user
+# - copy of the Cargo.lock is required for proper dependencies versions
+# - cargo vendor --sync is requried to overcome [1] bug.
+#
+# [1]: https://github.com/rust-lang/wg-cargo-std-aware/issues/23
+COPY --chown=root:root /rust /rust/packages
+RUN cargo -Z unstable-options -C /rust/packages vendor > $CARGO_HOME/config.toml && \
+cp "$(rustc --print=sysroot)"/lib/rustlib/src/rust/Cargo.lock "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/ && \
+cargo -Z unstable-options -C /rust/packages vendor --sync "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/Cargo.toml && \
+rm "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/Cargo.lock && \
+sed -i "s#\"vendor\"#\"/rust/vendor\"#" $CARGO_HOME/config.toml && \
+cat $CARGO_HOME/config.toml && \
+mv /rust/packages/vendor /rust/vendor && \
+chmod -R o=r+X /rust/vendor && \
+ls -R -l /rust/packages && \
+rm -r /rust/packages

 # NOTE: Seems like gcc-11 is too new for ubuntu20 repository
 # A cross-linker for RISC-V 64 (we need it, because LLVM's LLD does not work):
 RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
@@ -89,7 +116,7 @@ RUN arch=${TARGETARCH:-amd64} \
 && dpkg -i /tmp/nfpm.deb \
 && rm /tmp/nfpm.deb

-ARG GO_VERSION=1.19.5
+ARG GO_VERSION=1.19.10
 # We need go for clickhouse-diagnostics
 RUN arch=${TARGETARCH:-amd64} \
 && curl -Lo /tmp/go.tgz "https://go.dev/dl/go${GO_VERSION}.linux-${arch}.tar.gz" \
docker/packager/binary/rust (new symbolic link)
@@ -0,0 +1 @@
+../../../rust
@@ -138,6 +138,7 @@ def parse_env_variables(
 ARM_V80COMPAT_SUFFIX = "-aarch64-v80compat"
 FREEBSD_SUFFIX = "-freebsd"
 PPC_SUFFIX = "-ppc64le"
+RISCV_SUFFIX = "-riscv64"
 AMD64_COMPAT_SUFFIX = "-amd64-compat"

 result = []
@@ -150,6 +151,7 @@ def parse_env_variables(
 is_cross_arm = compiler.endswith(ARM_SUFFIX)
 is_cross_arm_v80compat = compiler.endswith(ARM_V80COMPAT_SUFFIX)
 is_cross_ppc = compiler.endswith(PPC_SUFFIX)
+is_cross_riscv = compiler.endswith(RISCV_SUFFIX)
 is_cross_freebsd = compiler.endswith(FREEBSD_SUFFIX)
 is_amd64_compat = compiler.endswith(AMD64_COMPAT_SUFFIX)

@@ -206,6 +208,11 @@ def parse_env_variables(
 cmake_flags.append(
 "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-ppc64le.cmake"
 )
+elif is_cross_riscv:
+cc = compiler[: -len(RISCV_SUFFIX)]
+cmake_flags.append(
+"-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-riscv64.cmake"
+)
 elif is_amd64_compat:
 cc = compiler[: -len(AMD64_COMPAT_SUFFIX)]
 result.append("DEB_ARCH=amd64")
@@ -370,6 +377,7 @@ def parse_args() -> argparse.Namespace:
 "clang-16-aarch64",
 "clang-16-aarch64-v80compat",
 "clang-16-ppc64le",
+"clang-16-riscv64",
 "clang-16-amd64-compat",
 "clang-16-freebsd",
 ),
@@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
 # lts / testing / prestable / etc
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="23.5.2.7"
+ARG VERSION="23.6.2.18"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -1,4 +1,4 @@
-FROM ubuntu:22.04
+FROM ubuntu:20.04

 # see https://github.com/moby/moby/issues/4032#issuecomment-192327844
 ARG DEBIAN_FRONTEND=noninteractive
@@ -11,18 +11,19 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
 && apt-get update \
 && apt-get upgrade -yq \
 && apt-get install --yes --no-install-recommends \
-apt-transport-https \
 ca-certificates \
-dirmngr \
-gnupg2 \
-wget \
 locales \
 tzdata \
-&& apt-get clean
+wget \
+&& apt-get clean \
+&& rm -rf \
+/var/lib/apt/lists/* \
+/var/cache/debconf \
+/tmp/*

 ARG REPO_CHANNEL="stable"
-ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
-ARG VERSION="23.5.2.7"
+ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
+ARG VERSION="23.6.2.18"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # set non-empty deb_location_url url to create a docker image
@@ -43,49 +44,68 @@ ARG single_binary_location_url=""

 ARG TARGETARCH

-RUN arch=${TARGETARCH:-amd64} \
+# install from a web location with deb packages
+RUN arch="${TARGETARCH:-amd64}" \
 && if [ -n "${deb_location_url}" ]; then \
 echo "installing from custom url with deb packages: ${deb_location_url}" \
-rm -rf /tmp/clickhouse_debs \
+&& rm -rf /tmp/clickhouse_debs \
 && mkdir -p /tmp/clickhouse_debs \
 && for package in ${PACKAGES}; do \
 { wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_${arch}.deb" -P /tmp/clickhouse_debs || \
 wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_all.deb" -P /tmp/clickhouse_debs ; } \
 || exit 1 \
 ; done \
-&& dpkg -i /tmp/clickhouse_debs/*.deb ; \
-elif [ -n "${single_binary_location_url}" ]; then \
+&& dpkg -i /tmp/clickhouse_debs/*.deb \
+&& rm -rf /tmp/* ; \
+fi
+
+# install from a single binary
+RUN if [ -n "${single_binary_location_url}" ]; then \
 echo "installing from single binary url: ${single_binary_location_url}" \
 && rm -rf /tmp/clickhouse_binary \
 && mkdir -p /tmp/clickhouse_binary \
 && wget --progress=bar:force:noscroll "${single_binary_location_url}" -O /tmp/clickhouse_binary/clickhouse \
 && chmod +x /tmp/clickhouse_binary/clickhouse \
-&& /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" ; \
-else \
-mkdir -p /etc/apt/sources.list.d \
-&& apt-key adv --keyserver keyserver.ubuntu.com --recv 8919F6BD2B48D754 \
-&& echo ${REPOSITORY} > /etc/apt/sources.list.d/clickhouse.list \
+&& /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" \
+&& rm -rf /tmp/* ; \
+fi
+
+# A fallback to installation from ClickHouse repository
+RUN if ! clickhouse local -q "SELECT ''" > /dev/null 2>&1; then \
+apt-get update \
+&& apt-get install --yes --no-install-recommends \
+apt-transport-https \
+ca-certificates \
+dirmngr \
+gnupg2 \
+&& mkdir -p /etc/apt/sources.list.d \
+&& GNUPGHOME=$(mktemp -d) \
+&& GNUPGHOME="$GNUPGHOME" gpg --no-default-keyring \
+--keyring /usr/share/keyrings/clickhouse-keyring.gpg \
+--keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8919F6BD2B48D754 \
+&& rm -r "$GNUPGHOME" \
+&& chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \
+&& echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \
 && echo "installing from repository: ${REPOSITORY}" \
 && apt-get update \
-&& apt-get --yes -o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold" upgrade \
 && for package in ${PACKAGES}; do \
 packages="${packages} ${package}=${VERSION}" \
 ; done \
 && apt-get install --allow-unauthenticated --yes --no-install-recommends ${packages} || exit 1 \
-; fi \
-&& clickhouse-local -q 'SELECT * FROM system.build_options' \
-&& rm -rf \
-/var/lib/apt/lists/* \
-/var/cache/debconf \
-/tmp/* \
-&& mkdir -p /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client \
-&& chmod ugo+Xrw -R /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client
+&& rm -rf \
+/var/lib/apt/lists/* \
+/var/cache/debconf \
+/tmp/* \
+&& apt-get autoremove --purge -yq libksba8 \
+&& apt-get autoremove -yq \
+; fi

-RUN apt-get autoremove --purge -yq libksba8 && \
-apt-get autoremove -yq
-
+# post install
 # we need to allow "others" access to clickhouse folder, because docker container
 # can be started with arbitrary uid (openshift usecase)
+RUN clickhouse-local -q 'SELECT * FROM system.build_options' \
+&& mkdir -p /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client \
+&& chmod ugo+Xrw -R /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client

 RUN locale-gen en_US.UTF-8
 ENV LANG en_US.UTF-8
@@ -20,7 +20,6 @@ For more information and documentation see https://clickhouse.com/.

 - The amd64 image requires support for [SSE3 instructions](https://en.wikipedia.org/wiki/SSE3). Virtually all x86 CPUs after 2005 support SSE3.
 - The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A). Most ARM CPUs after 2017 support ARMv8.2-A. A notable exception is Raspberry Pi 4 from 2019 whose CPU only supports ARMv8.0-A.
-- Since the Clickhouse 23.3 Ubuntu image started using `ubuntu:22.04` as its base image, it requires docker version >= `20.10.10`, or use `docker run -- privileged` instead. Alternatively, try the Clickhouse Alpine image.

 ## How to use this image

@@ -98,8 +97,8 @@ docker run -d \

 You may also want to mount:

-* `/etc/clickhouse-server/config.d/*.xml` - files with server configuration adjustmenets
-* `/etc/clickhouse-server/users.d/*.xml` - files with user settings adjustmenets
+* `/etc/clickhouse-server/config.d/*.xml` - files with server configuration adjustments
+* `/etc/clickhouse-server/users.d/*.xml` - files with user settings adjustments
 * `/docker-entrypoint-initdb.d/` - folder with database initialization scripts (see below).

 ### Linux capabilities
@@ -9,6 +9,7 @@ RUN apt-get update \
 expect \
 file \
 lsof \
+odbcinst \
 psmisc \
 python3 \
 python3-lxml \
@@ -80,7 +80,7 @@ function start_server

 function clone_root
 {
-git config --global --add safe.directory "$FASTTEST_SOURCE"
+[ "$UID" -eq 0 ] && git config --global --add safe.directory "$FASTTEST_SOURCE"
 git clone --depth 1 https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"

 (
@@ -141,17 +141,17 @@ function clone_submodules
 contrib/jemalloc
 contrib/replxx
 contrib/wyhash
-contrib/hashidsxx
 contrib/c-ares
 contrib/morton-nd
 contrib/xxHash
 contrib/simdjson
 contrib/liburing
 contrib/libfiu
+contrib/incbin
 )

 git submodule sync
-git submodule update --jobs=16 --depth 1 --init "${SUBMODULES_TO_UPDATE[@]}"
+git submodule update --jobs=16 --depth 1 --single-branch --init "${SUBMODULES_TO_UPDATE[@]}"
 git submodule foreach git reset --hard
 git submodule foreach git checkout @ -f
 git submodule foreach git clean -xfd
@@ -166,7 +166,6 @@ function run_cmake
 "-DENABLE_UTILS=0"
 "-DENABLE_EMBEDDED_COMPILER=0"
 "-DENABLE_THINLTO=0"
-"-DUSE_UNWIND=1"
 "-DENABLE_NURAFT=1"
 "-DENABLE_SIMDJSON=1"
 "-DENABLE_JEMALLOC=1"
@@ -202,10 +201,11 @@ function build
 | ts '%Y-%m-%d %H:%M:%S' \
 | tee "$FASTTEST_OUTPUT/test_result.txt"
 if [ "$COPY_CLICKHOUSE_BINARY_TO_OUTPUT" -eq "1" ]; then
-cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
-strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
-zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
+mkdir -p "$FASTTEST_OUTPUT/binaries/"
+cp programs/clickhouse "$FASTTEST_OUTPUT/binaries/clickhouse"
+
+strip programs/clickhouse -o programs/clickhouse-stripped
+zstd --threads=0 programs/clickhouse-stripped -o "$FASTTEST_OUTPUT/binaries/clickhouse-stripped.zst"
 fi
 ccache_status
 ccache --evict-older-than 1d ||:
@@ -291,7 +291,7 @@ quit
 if [ "$server_died" == 1 ]
 then
 # The server has died.
-if ! rg --text -o 'Received signal.*|Logical error.*|Assertion.*failed|Failed assertion.*|.*runtime error: .*|.*is located.*|(SUMMARY|ERROR): [a-zA-Z]+Sanitizer:.*|.*_LIBCPP_ASSERT.*' server.log > description.txt
+if ! rg --text -o 'Received signal.*|Logical error.*|Assertion.*failed|Failed assertion.*|.*runtime error: .*|.*is located.*|(SUMMARY|ERROR): [a-zA-Z]+Sanitizer:.*|.*_LIBCPP_ASSERT.*|.*Child process was terminated by signal 9.*' server.log > description.txt
 then
 echo "Lost connection to server. See the logs." > description.txt
 fi
@@ -46,12 +46,13 @@ RUN arch=${TARGETARCH:-amd64} \
 arm64) rarch=aarch64 ;; \
 esac \
 && cd /tmp \
-&& curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.27-1.el8.${rarch}.rpm" \
+&& curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.32-1.el9.${rarch}.rpm" \
 && rpm2archive mysql-odbc.rpm \
 && tar xf mysql-odbc.rpm.tgz -C / ./usr/lib64/ \
-&& LINK_DIR=$(dpkg -L libodbc1 | rg '^/usr/lib/.*-linux-gnu/odbc$') \
-&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR" \
-&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR"/libmyodbc.so
+&& rm mysql-odbc.rpm mysql-odbc.rpm.tgz \
+&& ODBC_DIR=$(dpkg -L odbc-postgresql | rg '^/usr/lib/.*-linux-gnu/odbc$') \
+&& ln -s /usr/lib64/libmyodbc8a.so "$ODBC_DIR" \
+&& ln -s /usr/lib64/libmyodbc8a.so "$ODBC_DIR"/libmyodbc.so

 # Unfortunately this is required for a single test for conversion data from zookeeper to clickhouse-keeper.
 # ZooKeeper is not started by default, but consumes some space in containers.
@@ -2,4 +2,7 @@
 # Helper docker container to run iptables without sudo

 FROM alpine
-RUN apk add -U iproute2
+RUN apk add --no-cache -U iproute2 \
+&& for bin in iptables iptables-restore iptables-save; \
+do ln -sf xtables-nft-multi "/sbin/$bin"; \
+done
@@ -1,7 +1,7 @@
 # docker build -t clickhouse/mysql-php-client .
 # MySQL PHP client docker container

-FROM php:8.0.18-cli
+FROM php:8-cli-alpine

 COPY ./client.crt client.crt
 COPY ./client.key client.key
@@ -1,5 +1,5 @@
 # docker build -t clickhouse/integration-tests-runner .
-FROM ubuntu:20.04
+FROM ubuntu:22.04

 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"

@@ -47,26 +47,30 @@ ENV TZ=Etc/UTC
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

 ENV DOCKER_CHANNEL stable
+# Unpin the docker version after the release 24.0.3 is released
+# https://github.com/moby/moby/issues/45770#issuecomment-1618255130
 RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
     && add-apt-repository "deb https://download.docker.com/linux/ubuntu $(lsb_release -c -s) ${DOCKER_CHANNEL}" \
     && apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
-        docker-ce \
+        docker-ce='5:23.*' \
     && rm -rf \
         /var/lib/apt/lists/* \
         /var/cache/debconf \
         /tmp/* \
-    && apt-get clean
-
-RUN dockerd --version; docker --version
+    && apt-get clean \
+    && dockerd --version; docker --version

 RUN python3 -m pip install --no-cache-dir \
     PyMySQL \
-    aerospike==4.0.0 \
-    avro==1.10.2 \
+    aerospike==11.1.0 \
     asyncio \
+    avro==1.10.2 \
+    azure-storage-blob \
     cassandra-driver \
-    confluent-kafka==1.5.0 \
+    confluent-kafka==1.9.2 \
+    delta-spark==2.3.0 \
     dict2xml \
     dicttoxml \
     docker \
@@ -76,47 +80,52 @@ RUN python3 -m pip install --no-cache-dir \
     kafka-python \
     kazoo \
     lz4 \
+    meilisearch==0.18.3 \
     minio \
     nats-py \
     protobuf \
-    psycopg2-binary==2.8.6 \
+    psycopg2-binary==2.9.6 \
+    pyhdfs \
     pymongo==3.11.0 \
+    pyspark==3.3.2 \
     pytest \
     pytest-order==1.0.0 \
-    pytest-timeout \
     pytest-random \
-    pytest-xdist \
     pytest-repeat \
+    pytest-timeout \
+    pytest-xdist \
     pytz \
     redis \
-    tzlocal==2.1 \
-    urllib3 \
     requests-kerberos \
-    pyspark==3.3.2 \
-    delta-spark==2.2.0 \
-    pyhdfs \
-    azure-storage-blob \
-    meilisearch==0.18.3
+    tzlocal==2.1 \
+    retry \
+    urllib3

-COPY modprobe.sh /usr/local/bin/modprobe
-COPY dockerd-entrypoint.sh /usr/local/bin/
-COPY compose/ /compose/
-COPY misc/ /misc/

+# Hudi supports only spark 3.3.*, not 3.4
 RUN curl -fsSL -O https://dlcdn.apache.org/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz \
     && tar xzvf spark-3.3.2-bin-hadoop3.tgz -C / \
     && rm spark-3.3.2-bin-hadoop3.tgz

 # download spark and packages
 # if you change packages, don't forget to update them in tests/integration/helpers/cluster.py
-RUN echo ":quit" | /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" > /dev/null
+RUN packages="org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,\
+io.delta:delta-core_2.12:2.3.0,\
+org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" \
+    && /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "$packages" > /dev/null \
+    && find /root/.ivy2/ -name '*.jar' -exec ln -sf {} /spark-3.3.2-bin-hadoop3/jars/ \;

 RUN set -x \
     && addgroup --system dockremap \
     && adduser --system dockremap \
     && adduser dockremap dockremap \
     && echo 'dockremap:165536:65536' >> /etc/subuid \
     && echo 'dockremap:165536:65536' >> /etc/subgid

+COPY modprobe.sh /usr/local/bin/modprobe
+COPY dockerd-entrypoint.sh /usr/local/bin/
+COPY compose/ /compose/
+COPY misc/ /misc/

 # Same options as in test/base/Dockerfile
 # (in case you need to override them in tests)
@@ -126,4 +135,5 @@ ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'

 EXPOSE 2375
 ENTRYPOINT ["dockerd-entrypoint.sh"]
-CMD ["sh", "-c", "pytest $PYTEST_OPTS"]
+# To pass additional arguments (i.e. list of tests) use PYTEST_ADDOPTS
+CMD ["sh", "-c", "pytest"]
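Dropping the $PYTEST_OPTS interpolation means extra arguments now travel through pytest's standard PYTEST_ADDOPTS environment variable, which pytest appends to its command line on startup. A usage sketch (the options themselves are illustrative):

    docker run --rm \
        -e PYTEST_ADDOPTS="-k test_storage_kafka -vvv" \
        clickhouse/integration-tests-runner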
@@ -4,6 +4,8 @@ services:
   kafka_zookeeper:
     image: zookeeper:3.4.9
     hostname: kafka_zookeeper
+    ports:
+      - 2181:2181
     environment:
       ZOO_MY_ID: 1
       ZOO_PORT: 2181
@@ -15,15 +17,14 @@ services:
     image: confluentinc/cp-kafka:5.2.0
     hostname: kafka1
     ports:
-      - ${KAFKA_EXTERNAL_PORT:-8081}:${KAFKA_EXTERNAL_PORT:-8081}
+      - ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT}
     environment:
       KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:${KAFKA_EXTERNAL_PORT},OUTSIDE://kafka1:19092
       KAFKA_ADVERTISED_HOST_NAME: kafka1
-      KAFKA_LISTENERS: INSIDE://0.0.0.0:${KAFKA_EXTERNAL_PORT},OUTSIDE://0.0.0.0:19092
       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
       KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
       KAFKA_BROKER_ID: 1
-      KAFKA_ZOOKEEPER_CONNECT: "kafka_zookeeper:2181"
+      KAFKA_ZOOKEEPER_CONNECT: kafka_zookeeper:2181
       KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
     depends_on:
@@ -35,13 +36,38 @@ services:
     image: confluentinc/cp-schema-registry:5.2.0
     hostname: schema-registry
     ports:
-      - ${SCHEMA_REGISTRY_EXTERNAL_PORT:-12313}:${SCHEMA_REGISTRY_INTERNAL_PORT:-12313}
+      - ${SCHEMA_REGISTRY_EXTERNAL_PORT}:${SCHEMA_REGISTRY_EXTERNAL_PORT}
     environment:
       SCHEMA_REGISTRY_HOST_NAME: schema-registry
-      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
+      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:${SCHEMA_REGISTRY_EXTERNAL_PORT}
+      SCHEMA_REGISTRY_SCHEMA_REGISTRY_GROUP_ID: noauth
     depends_on:
       - kafka_zookeeper
       - kafka1
+    restart: always
+    security_opt:
+      - label:disable
+
+  schema-registry-auth:
+    image: confluentinc/cp-schema-registry:5.2.0
+    hostname: schema-registry-auth
+    ports:
+      - ${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry-auth
+      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
+      SCHEMA_REGISTRY_AUTHENTICATION_METHOD: BASIC
+      SCHEMA_REGISTRY_AUTHENTICATION_ROLES: user
+      SCHEMA_REGISTRY_AUTHENTICATION_REALM: RealmFooBar
+      SCHEMA_REGISTRY_OPTS: "-Djava.security.auth.login.config=/etc/schema-registry/secrets/schema_registry_jaas.conf"
+      SCHEMA_REGISTRY_SCHEMA_REGISTRY_GROUP_ID: auth
+    volumes:
+      - ${SCHEMA_REGISTRY_DIR:-}/secrets:/etc/schema-registry/secrets
+    depends_on:
+      - kafka_zookeeper
+      - kafka1
+    restart: always
     security_opt:
       - label:disable
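The two registries differ only in authentication: the new schema-registry-auth instance requires HTTP BASIC credentials matching the mounted JAAS config, while the noauth instance stays open. A hedged smoke test against the REST API (ports come from the compose environment; the credentials here are assumptions):

    # open registry: should return a JSON list of subjects
    curl -s "http://localhost:${SCHEMA_REGISTRY_EXTERNAL_PORT}/subjects"
    # authenticated registry: 401 without credentials...
    curl -s -o /dev/null -w '%{http_code}\n' \
        "http://localhost:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}/subjects"
    # ...and 200 with BASIC auth accepted by the JAAS realm
    curl -s -u "user:password" \
        "http://localhost:${SCHEMA_REGISTRY_AUTH_EXTERNAL_PORT}/subjects"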
@@ -12,6 +12,17 @@ echo '{
   "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"]
 }' | dd of=/etc/docker/daemon.json 2>/dev/null

+if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
+    # move the processes from the root group to the /init group,
+    # otherwise writing subtree_control fails with EBUSY.
+    # An error during moving non-existent process (i.e. "cat") is ignored.
+    mkdir -p /sys/fs/cgroup/init
+    xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || :
+    # enable controllers
+    sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \
+        > /sys/fs/cgroup/cgroup.subtree_control
+fi
+
 # In case of test hung it is convenient to use pytest --pdb to debug it,
 # and on hung you can simply press Ctrl-C and it will spawn a python pdb,
 # but on SIGINT dockerd will exit, so ignore it to preserve the daemon.
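The cgroup v2 block works around the "no internal processes" rule: a cgroup whose controllers are delegated to children may not hold processes itself, so every PID has to move out of the root group before subtree_control can be written. To confirm the delegation took effect inside the container:

    # non-empty output means controllers were delegated
    cat /sys/fs/cgroup/cgroup.subtree_control
    # the root group should hold no processes anymore
    wc -l < /sys/fs/cgroup/cgroup.procs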
@@ -52,6 +63,8 @@ export CLICKHOUSE_TESTS_BASE_CONFIG_DIR=/clickhouse-config
 export CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH=/clickhouse-odbc-bridge
 export CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH=/clickhouse-library-bridge

+export DOCKER_BASE_TAG=${DOCKER_BASE_TAG:=latest}
+export DOCKER_HELPER_TAG=${DOCKER_HELPER_TAG:=latest}
 export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
 export DOCKER_DOTNET_CLIENT_TAG=${DOCKER_DOTNET_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
@@ -14,6 +14,13 @@ LEFT_SERVER_PORT=9001
 # patched version
 RIGHT_SERVER_PORT=9002

+# abort_conf -- abort if some options is not recognized
+# abort -- abort if something is not right in the env (i.e. per-cpu arenas does not work)
+# narenas -- set them explicitly to avoid disabling per-cpu arena in env
+#   that returns different number of CPUs for some of the following
+#   _SC_NPROCESSORS_ONLN/_SC_NPROCESSORS_CONF/sched_getaffinity
+export MALLOC_CONF="abort_conf:true,abort:true,narenas:$(nproc --all)"
+
 function wait_for_server # port, pid
 {
     for _ in {1..60}
@@ -109,10 +116,6 @@ function restart
     while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
     echo all killed

-    # Change the jemalloc settings here.
-    # https://github.com/jemalloc/jemalloc/wiki/Getting-Started
-    export MALLOC_CONF="confirm_conf:true"
-
     set -m # Spawn servers in their own process groups

     local left_server_opts=(
@@ -147,8 +150,6 @@ function restart
     set +m

-    unset MALLOC_CONF
-
     wait_for_server $LEFT_SERVER_PORT $left_pid
     echo left ok
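jemalloc reads MALLOC_CONF once at process startup, so moving the export to the top of the script applies it to both servers instead of being set and unset around restart. A small sketch of how the options behave (the binary name is illustrative):

    # abort_conf:true turns a mistyped option into a startup abort
    MALLOC_CONF="abort_conf:true,no_such_option:1" ./clickhouse --version   # aborts
    # the real setting also pins one arena per CPU
    MALLOC_CONF="abort_conf:true,abort:true,narenas:$(nproc --all)" ./clickhouse --version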
@@ -16,7 +16,6 @@ def process_result(result_folder):
         "TLPGroupBy",
         "TLPHaving",
         "TLPWhere",
-        "TLPWhereGroupBy",
         "NoREC",
     ]
     failed_tests = []
@@ -33,7 +33,7 @@ cd /workspace

 for _ in $(seq 1 60); do if [[ $(wget -q 'localhost:8123' -O-) == 'Ok.' ]]; then break ; else sleep 1; fi ; done

-cd /sqlancer/sqlancer-master
+cd /sqlancer/sqlancer-main

 TIMEOUT=300
 NUM_QUERIES=1000
@@ -13,6 +13,7 @@ RUN apt-get update --yes \
         sqlite3 \
         unixodbc \
         unixodbc-dev \
+        odbcinst \
         sudo \
     && apt-get clean
@@ -92,8 +92,8 @@ sudo clickhouse stop ||:

 for _ in $(seq 1 60); do if [[ $(wget --timeout=1 -q 'localhost:8123' -O-) == 'Ok.' ]]; then sleep 1 ; else break; fi ; done

-grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
-pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz &
+rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
+zstd < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst &

 # Compressed (FIXME: remove once only github actions will be left)
 rm /var/log/clickhouse-server/clickhouse-server.log
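With the artifact now zstd-compressed, later inspection goes through zstd's cat helper instead of gunzip/pigz, for example:

    # scan the compressed log without materializing the plain text
    zstdcat /test_output/clickhouse-server.log.zst | rg -Fa "Fatal" ||: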
@@ -16,8 +16,9 @@ COPY s3downloader /s3downloader
 ENV S3_URL="https://clickhouse-datasets.s3.amazonaws.com"
 ENV DATASETS="hits visits"

-RUN npm install -g azurite
-RUN npm install tslib
+# The following is already done in clickhouse/stateless-test
+# RUN npm install -g azurite
+# RUN npm install tslib

 COPY run.sh /
 CMD ["/bin/bash", "/run.sh"]
@@ -20,6 +20,7 @@ RUN apt-get update -y \
         netcat-openbsd \
         nodejs \
         npm \
+        odbcinst \
         openjdk-11-jre-headless \
         openssl \
         postgresql-client \
@@ -32,7 +33,6 @@ RUN apt-get update -y \
         qemu-user-static \
         sqlite3 \
         sudo \
-        telnet \
         tree \
         unixodbc \
         wget \
@@ -71,7 +71,7 @@ RUN arch=${TARGETARCH:-amd64} \
     && chmod +x ./mc ./minio

-RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
+RUN wget --no-verbose 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
    && tar -xvf hadoop-3.3.1.tar.gz \
    && rm -rf hadoop-3.3.1.tar.gz
@@ -79,8 +79,8 @@ ENV MINIO_ROOT_USER="clickhouse"
 ENV MINIO_ROOT_PASSWORD="clickhouse"
 ENV EXPORT_S3_STORAGE_POLICIES=1

-RUN npm install -g azurite
-RUN npm install tslib
+RUN npm install -g azurite \
+    && npm install -g tslib

 COPY run.sh /
 COPY setup_minio.sh /
@@ -4,6 +4,9 @@
 set -e -x -a

 # Choose random timezone for this test run.
+#
+# NOTE: that clickhouse-test will randomize session_timezone by itself as well
+# (it will choose between default server timezone and something specific).
 TZ="$(rg -v '#' /usr/share/zoneinfo/zone.tab | awk '{print $3}' | shuf | head -n1)"
 echo "Choosen random timezone $TZ"
 ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime && echo "$TZ" > /etc/timezone
@@ -18,6 +21,9 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
 # shellcheck disable=SC1091
 source /usr/share/clickhouse-test/ci/attach_gdb.lib || true # FIXME: to not break old builds, clean on 2023-09-01

+# shellcheck disable=SC1091
+source /usr/share/clickhouse-test/ci/utils.lib || true # FIXME: to not break old builds, clean on 2023-09-01
+
 # install test configs
 /usr/share/clickhouse-test/config/install.sh
@@ -90,6 +96,22 @@ sleep 5

 attach_gdb_to_clickhouse || true # FIXME: to not break old builds, clean on 2023-09-01

+function fn_exists() {
+    declare -F "$1" > /dev/null;
+}
+
+# FIXME: to not break old builds, clean on 2023-09-01
+function try_run_with_retry() {
+    local total_retries="$1"
+    shift
+
+    if fn_exists run_with_retry; then
+        run_with_retry "$total_retries" "$@"
+    else
+        "$@"
+    fi
+}
+
 function run_tests()
 {
     set -x
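try_run_with_retry degrades gracefully across image generations: when utils.lib provides run_with_retry, the command is retried up to the given count; on older images that lack it, the command simply runs once. Usage sketch:

    # retried up to 10 times on new images, run once on old ones
    try_run_with_retry 10 clickhouse-client -q "SELECT 1"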
@@ -137,8 +159,7 @@ function run_tests()

     ADDITIONAL_OPTIONS+=('--report-logs-stats')

-    clickhouse-test "00001_select_1" > /dev/null ||:
-    clickhouse-client -q "insert into system.zookeeper (name, path, value) values ('auxiliary_zookeeper2', '/test/chroot/', '')" ||:
+    try_run_with_retry 10 clickhouse-client -q "insert into system.zookeeper (name, path, value) values ('auxiliary_zookeeper2', '/test/chroot/', '')"

     set +e
     clickhouse-test --testname --shard --zookeeper --check-zookeeper-session --hung-check --print-time \