Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-15 12:14:18 +00:00)

Commit 69cbf80daf: Merge branch 'master' into symlinks
.github/PULL_REQUEST_TEMPLATE.md | 3

@@ -59,6 +59,9 @@ At a minimum, the following information should be added (but add more as needed)
- [ ] <!---ci_exclude_tsan|msan|ubsan|coverage--> Exclude: All with TSAN, MSAN, UBSAN, Coverage
- [ ] <!---ci_exclude_aarch64|release|debug--> Exclude: All with aarch64, release, debug
---
- [ ] <!---ci_include_fuzzer--> Run only fuzzers related jobs (libFuzzer fuzzers, AST fuzzers, etc.)
- [ ] <!---ci_exclude_ast--> Exclude: AST fuzzers
---
- [ ] <!---do_not_test--> Do not test
- [ ] <!---woolen_wolfdog--> Woolen Wolfdog
- [ ] <!---upload_all--> Upload binaries for special builds
.github/actions/clean/action.yml | 20

@@ -1,11 +1,23 @@
name: Clean runner
description: Clean the runner's temp path on ending
inputs:
  images:
    description: clean docker images
    default: false
    type: boolean
runs:
  using: "composite"
  steps:
    - name: Clean
    - name: Clean Temp
      shell: bash
      run: |
        docker ps --quiet | xargs --no-run-if-empty docker kill ||:
        docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
        sudo rm -fr "${{runner.temp}}"
        sudo rm -fr "${{runner.temp}}"
    - name: Clean Docker Containers
      shell: bash
      run: |
        docker rm -vf $(docker ps -aq) ||:
    - name: Clean Docker Images
      if: ${{ inputs.images }}
      shell: bash
      run: |
        docker rmi -f $(docker images -aq) ||:
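A workflow consumes this composite action as an ordinary local step. A minimal usage sketch, mirroring how the CleanUp job in auto_releases.yml below wires it up (the job and step layout here are illustrative):

```yaml
jobs:
  CleanUp:
    runs-on: [self-hosted, release-maker]
    steps:
      # Remove leftover containers and the runner temp path;
      # images: true additionally prunes docker images.
      - uses: ./.github/actions/clean
        with:
          images: true
```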
.github/actions/debug/action.yml | 34 (new file)

@@ -0,0 +1,34 @@
name: DebugInfo
description: Prints workflow debug info

runs:
  using: "composite"
  steps:
    - name: Envs, event.json and contexts
      shell: bash
      run: |
        echo '::group::Environment variables'
        env | sort
        echo '::endgroup::'

        echo '::group::event.json'
        python3 -m json.tool "$GITHUB_EVENT_PATH"
        echo '::endgroup::'

        cat << 'EOF'
        ::group::github context
        ${{ toJSON(github) }}
        ::endgroup::

        ::group::env context
        ${{ toJSON(env) }}
        ::endgroup::

        ::group::runner context
        ${{ toJSON(runner) }}
        ::endgroup::

        ::group::job context
        ${{ toJSON(job) }}
        ::endgroup::
        EOF
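Several of the workflow diffs below swap the pinned `hmarr/debug-action` step for this in-repo action. A minimal sketch of the resulting step order (checkout has to come first so the local action exists in the workspace):

```yaml
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
      # Dump env vars, event.json and the github/env/runner/job contexts into the job log.
      - name: Debug Info
        uses: ./.github/actions/debug
```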
.github/workflows/auto_releases.yml | 99 (new file)

@@ -0,0 +1,99 @@
name: AutoReleases

env:
  PYTHONUNBUFFERED: 1

concurrency:
  group: autoreleases

on:
  # schedule:
  #   - cron: '0 9 * * *'
  workflow_dispatch:
    inputs:
      dry-run:
        description: 'Dry run'
        required: false
        default: false
        type: boolean

jobs:
  AutoReleaseInfo:
    runs-on: [self-hosted, release-maker]
    outputs:
      data: ${{ steps.info.outputs.AUTO_RELEASE_PARAMS }}
      dry_run: ${{ steps.info.outputs.DRY_RUN }}
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          EOF
          echo "DRY_RUN=true" >> "$GITHUB_ENV"
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          fetch-depth: 0 # full history needed
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Prepare Info
        id: info
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 auto_release.py --prepare
          echo "::group::Auto Release Info"
          python3 -m json.tool /tmp/autorelease_info.json
          echo "::endgroup::"
          {
            echo 'AUTO_RELEASE_PARAMS<<EOF'
            cat /tmp/autorelease_params.json
            echo 'EOF'
          } >> "$GITHUB_OUTPUT"
          if [[ "${{ github.event_name }}" == "schedule" ]]; then
            echo "DRY_RUN=true" >> "$GITHUB_OUTPUT"
          else
            echo "DRY_RUN=${{ github.event.inputs.dry-run }}" >> "$GITHUB_OUTPUT"
          fi
      - name: Post Release Branch statuses
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 auto_release.py --post-status
      - name: Clean up
        uses: ./.github/actions/clean

  Releases:
    needs: AutoReleaseInfo
    strategy:
      matrix:
        release_params: ${{ fromJson(needs.AutoReleaseInfo.outputs.data).releases }}
      max-parallel: 1
    name: Release ${{ matrix.release_params.release_branch }}
    uses: ./.github/workflows/create_release.yml
    with:
      ref: ${{ matrix.release_params.commit_sha }}
      type: patch
      dry-run: ${{ fromJson(needs.AutoReleaseInfo.outputs.dry_run) }}
    secrets:
      ROBOT_CLICKHOUSE_COMMIT_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}

  CleanUp:
    needs: [Releases]
    runs-on: [self-hosted, release-maker]
    steps:
      - uses: ./.github/actions/clean
        with:
          images: true

#  PostSlackMessage:
#    needs: [Releases]
#    runs-on: [self-hosted, release-maker]
#    if: ${{ !cancelled() }}
#    steps:
#      - name: Check out repository code
#        uses: ClickHouse/checkout@v1
#      - name: Post
#        run: |
#          cd "$GITHUB_WORKSPACE/tests/ci"
#          python3 auto_release.py --post-auto-release-complete --wf-status ${{ job.status }}
.github/workflows/backport_branches.yml | 2

@@ -27,6 +27,8 @@ jobs:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Labels check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
.github/workflows/cherry_pick.yml | 2

@@ -33,6 +33,8 @@ jobs:
          clear-repository: true
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Cherry pick
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
.github/workflows/create_release.yml | 30

@@ -2,6 +2,7 @@ name: CreateRelease
concurrency:
  group: release

'on':
  workflow_dispatch:
    inputs:
@@ -26,6 +27,28 @@ concurrency:
        required: false
        default: false
        type: boolean
  workflow_call:
    inputs:
      ref:
        description: 'Git reference (branch or commit sha) from which to create the release'
        required: true
        type: string
      type:
        description: 'The type of release: "new" for a new release or "patch" for a patch release'
        required: true
        type: string
      only-repo:
        description: 'Run only repos updates including docker (repo-recovery, tests)'
        required: false
        default: false
        type: boolean
      dry-run:
        description: 'Dry run'
        required: false
        default: false
        type: boolean
    secrets:
      ROBOT_CLICKHOUSE_COMMIT_TOKEN:

jobs:
  CreateRelease:
@@ -33,13 +56,13 @@
      GH_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
    runs-on: [self-hosted, release-maker]
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Prepare Release Info
        shell: bash
        run: |
@@ -101,6 +124,7 @@
            --volume=".:/wd" --workdir="/wd" \
            clickhouse/style-test \
            ./tests/ci/changelog.py -v --debug-helpers \
            --gh-user-or-token ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }} \
            --jobs=5 \
            --output="./docs/changelogs/${{ env.RELEASE_TAG }}.md" ${{ env.RELEASE_TAG }}
          git add ./docs/changelogs/${{ env.RELEASE_TAG }}.md
@@ -129,9 +153,9 @@
        if: ${{ inputs.type == 'patch' && ! inputs.only-repo }}
        shell: bash
        run: |
          python3 ./tests/ci/create_release.py --set-progress-completed
          git reset --hard HEAD
          git checkout "$GITHUB_REF_NAME"
          python3 ./tests/ci/create_release.py --set-progress-completed
      - name: Create GH Release
        if: ${{ inputs.type == 'patch' && ! inputs.only-repo }}
        shell: bash
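With the new `workflow_call` trigger, this release workflow can be invoked from another workflow instead of only via manual dispatch. A minimal caller sketch, mirroring the Releases job added in auto_releases.yml above (the ref value is purely illustrative):

```yaml
  Releases:
    uses: ./.github/workflows/create_release.yml
    with:
      ref: 0123456789abcdef0123456789abcdef01234567   # branch or commit sha to release from
      type: patch                                      # "new" or "patch"
      dry-run: true
    secrets:
      ROBOT_CLICKHOUSE_COMMIT_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
```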
.github/workflows/docker_test_images.yml | 1

@@ -11,6 +11,7 @@ name: Build docker images
        required: false
        type: boolean
        default: false

jobs:
  DockerBuildAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
.github/workflows/jepsen.yml | 9

@@ -8,27 +8,28 @@ on: # yamllint disable-line rule:truthy
  schedule:
    - cron: '0 */6 * * *'
  workflow_dispatch:

jobs:
  RunConfig:
    runs-on: [self-hosted, style-checker-aarch64]
    outputs:
      data: ${{ steps.runconfig.outputs.CI_DATA }}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: PrepareRunConfig
        id: runconfig
        run: |
          echo "::group::configure CI run"
          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --workflow "$GITHUB_WORKFLOW" --outfile ${{ runner.temp }}/ci_run_data.json
          echo "::endgroup::"

          echo "::group::CI run configure results"
          python3 -m json.tool ${{ runner.temp }}/ci_run_data.json
          echo "::endgroup::"
@@ -67,7 +68,7 @@
        if: ${{ !cancelled() }}
        run: |
          export WORKFLOW_RESULT_FILE="/tmp/workflow_results.json"
          cat >> "$WORKFLOW_RESULT_FILE" << 'EOF'
          cat > "$WORKFLOW_RESULT_FILE" << 'EOF'
          ${{ toJson(needs) }}
          EOF
          python3 ./tests/ci/ci_buddy.py --check-wf-status
.github/workflows/master.yml | 4

@@ -15,14 +15,14 @@ jobs:
    outputs:
      data: ${{ steps.runconfig.outputs.CI_DATA }}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Merge sync PR
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
.github/workflows/merge_queue.yml | 4

@@ -14,14 +14,14 @@ jobs:
    outputs:
      data: ${{ steps.runconfig.outputs.CI_DATA }}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get a version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Cancel PR workflow
        run: |
          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --cancel-previous-run
.github/workflows/nightly.yml | 4

@@ -15,14 +15,14 @@ jobs:
    outputs:
      data: ${{ steps.runconfig.outputs.CI_DATA }}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: PrepareRunConfig
        id: runconfig
        run: |
.github/workflows/pull_request.yml | 4

@@ -25,14 +25,14 @@ jobs:
    outputs:
      data: ${{ steps.runconfig.outputs.CI_DATA }}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get a version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Cancel previous Sync PR workflow
        run: |
          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --cancel-previous-run
.github/workflows/release_branches.yml | 6

@@ -24,6 +24,8 @@ jobs:
          clear-repository: true # to ensure correct digests
          fetch-depth: 0 # to get version
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Labels check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
@@ -130,6 +132,7 @@
    with:
      build_name: package_debug
      data: ${{ needs.RunConfig.outputs.data }}
      force: true
  BuilderBinDarwin:
    needs: [RunConfig, BuildDockers]
    if: ${{ !failure() && !cancelled() }}
@@ -482,7 +485,7 @@
        if: ${{ !failure() }}
        run: |
          # update overall ci report
          python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
          python3 ./tests/ci/finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
      - name: Check Workflow results
        if: ${{ !cancelled() }}
        run: |
@@ -490,5 +493,4 @@
          cat > "$WORKFLOW_RESULT_FILE" << 'EOF'
          ${{ toJson(needs) }}
          EOF

          python3 ./tests/ci/ci_buddy.py --check-wf-status
.github/workflows/reusable_simple_job.yml | 4

@@ -62,8 +62,6 @@ jobs:
    env:
      GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}
    steps:
      - name: DebugInfo
        uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
@@ -72,6 +70,8 @@ jobs:
          submodules: ${{inputs.submodules}}
          fetch-depth: ${{inputs.checkout_depth}}
          filter: tree:0
      - name: Debug Info
        uses: ./.github/actions/debug
      - name: Set build envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
.gitmodules | 22

@@ -108,7 +108,7 @@
	url = https://github.com/ClickHouse/icudata
[submodule "contrib/icu"]
	path = contrib/icu
	url = https://github.com/unicode-org/icu
	url = https://github.com/ClickHouse/icu
[submodule "contrib/flatbuffers"]
	path = contrib/flatbuffers
	url = https://github.com/ClickHouse/flatbuffers
@ -170,9 +170,6 @@
[submodule "contrib/fast_float"]
	path = contrib/fast_float
	url = https://github.com/fastfloat/fast_float
[submodule "contrib/libpq"]
	path = contrib/libpq
	url = https://github.com/ClickHouse/libpq
[submodule "contrib/NuRaft"]
	path = contrib/NuRaft
	url = https://github.com/ClickHouse/NuRaft
@ -230,9 +227,6 @@
[submodule "contrib/minizip-ng"]
	path = contrib/minizip-ng
	url = https://github.com/zlib-ng/minizip-ng
[submodule "contrib/annoy"]
	path = contrib/annoy
	url = https://github.com/ClickHouse/annoy
[submodule "contrib/qpl"]
	path = contrib/qpl
	url = https://github.com/intel/qpl
@ -341,16 +335,13 @@
	url = https://github.com/graphitemaster/incbin.git
[submodule "contrib/usearch"]
	path = contrib/usearch
	url = https://github.com/unum-cloud/usearch.git
	url = https://github.com/ClickHouse/usearch.git
[submodule "contrib/SimSIMD"]
	path = contrib/SimSIMD
	url = https://github.com/ashvardanian/SimSIMD.git
[submodule "contrib/FP16"]
	path = contrib/FP16
	url = https://github.com/Maratyszcza/FP16.git
[submodule "contrib/robin-map"]
	path = contrib/robin-map
	url = https://github.com/Tessil/robin-map.git
[submodule "contrib/aklomp-base64"]
	path = contrib/aklomp-base64
	url = https://github.com/aklomp/base64.git
@ -372,6 +363,15 @@
[submodule "contrib/double-conversion"]
	path = contrib/double-conversion
	url = https://github.com/ClickHouse/double-conversion.git
[submodule "contrib/mongo-cxx-driver"]
	path = contrib/mongo-cxx-driver
	url = https://github.com/ClickHouse/mongo-cxx-driver.git
[submodule "contrib/mongo-c-driver"]
	path = contrib/mongo-c-driver
	url = https://github.com/ClickHouse/mongo-c-driver.git
[submodule "contrib/numactl"]
	path = contrib/numactl
	url = https://github.com/ClickHouse/numactl.git
[submodule "contrib/postgres"]
	path = contrib/postgres
	url = https://github.com/ClickHouse/postgres.git
CHANGELOG.md | 311

@@ -1,4 +1,6 @@
### Table of Contents
**[ClickHouse release v24.9, 2024-09-26](#249)**<br/>
**[ClickHouse release v24.8 LTS, 2024-08-20](#248)**<br/>
**[ClickHouse release v24.7, 2024-07-30](#247)**<br/>
**[ClickHouse release v24.6, 2024-07-01](#246)**<br/>
**[ClickHouse release v24.5, 2024-05-30](#245)**<br/>
@@ -10,6 +12,315 @@

# 2024 Changelog

### <a id="249"></a> ClickHouse release 24.9, 2024-09-26

#### Backward Incompatible Change
* Expressions like `a[b].c` are supported for named tuples, as well as named subscripts from arbitrary expressions, e.g., `expr().name`. This is useful for processing JSON. This closes [#54965](https://github.com/ClickHouse/ClickHouse/issues/54965). In previous versions, an expression of form `expr().name` was parsed as `tupleElement(expr(), name)`, and the query analyzer was searching for a column `name` rather than for the corresponding tuple element; while in the new version, it is changed to `tupleElement(expr(), 'name')`. In most cases, the previous version was not working, but it is possible to imagine a very unusual scenario when this change could lead to incompatibility: if you stored names of tuple elements in a column or an alias, that was named differently than the tuple element's name: `SELECT 'b' AS a, CAST([tuple(123)] AS 'Array(Tuple(b UInt8))') AS t, t[1].a`. It is very unlikely that you used such queries, but we still have to mark this change as potentially backward incompatible. [#68435](https://github.com/ClickHouse/ClickHouse/pull/68435) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* When the setting `print_pretty_type_names` is enabled, it will print `Tuple` data type in a pretty form in `SHOW CREATE TABLE` statements, `formatQuery` function, and in the interactive mode in `clickhouse-client` and `clickhouse-local`. In previous versions, this setting was only applied to `DESCRIBE` queries and `toTypeName`. This closes [#65753](https://github.com/ClickHouse/ClickHouse/issues/65753). [#68492](https://github.com/ClickHouse/ClickHouse/pull/68492) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Do not allow explicitly specifying UUID when creating a table in `Replicated` databases. Also, do not allow explicitly specifying Keeper path and replica name for *MergeTree tables in Replicated databases. It introduces a new setting `database_replicated_allow_explicit_uuid` and changes the type of `database_replicated_allow_replicated_engine_arguments` from Bool to UInt64 [#66104](https://github.com/ClickHouse/ClickHouse/pull/66104) ([Alexander Tokmakov](https://github.com/tavplubix)).

#### New Feature
* Allow a user to have multiple authentication methods instead of only one. Allow authentication methods to be reset to most recently added method. If you want to run instances on 24.8 and one on 24.9 for some time, it's better to set `max_authentication_methods_per_user` = 1 for that period to avoid potential incompatibilities. [#65277](https://github.com/ClickHouse/ClickHouse/pull/65277) ([Arthur Passos](https://github.com/arthurpassos)).
* Function `toStartOfInterval` now has a new overload which emulates TimescaleDB's `time_bucket` function, and respectively, PostgreSQL's `date_bin` function. ([#55619](https://github.com/ClickHouse/ClickHouse/issues/55619)). It allows to align date or timestamp values to multiples of a given interval from an *arbitrary* origin (instead of 0000-01-01 00:00:00.000 as *fixed* origin). For example, `SELECT toStartOfInterval(toDateTime('2023-01-01 14:45:00'), INTERVAL 1 MINUTE, toDateTime('2023-01-01 14:35:30'));` returns `2023-01-01 14:44:30` which is a multiple of 1 minute intervals, starting from origin `2023-01-01 14:35:30`. [#56738](https://github.com/ClickHouse/ClickHouse/pull/56738) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add support for `ATTACH PARTITION ALL FROM`. [#61987](https://github.com/ClickHouse/ClickHouse/pull/61987) ([Kirill Nikiforov](https://github.com/allmazz)).
* Introduced `JSONCompactWithProgress` format where ClickHouse outputs each row as a newline-delimited JSON object, including metadata, data, progress, totals, and statistics. [#66205](https://github.com/ClickHouse/ClickHouse/pull/66205) ([Alexey Korepanov](https://github.com/alexkorep)).
* Add the `input_format_json_empty_as_default` setting which, when enabled, treats empty fields in JSON inputs as default values. Closes [#59339](https://github.com/ClickHouse/ClickHouse/issues/59339). [#66782](https://github.com/ClickHouse/ClickHouse/pull/66782) ([Alexis Arnaud](https://github.com/a-a-f)).
* Added functions `overlay` and `overlayUTF8` which replace parts of a string by another string. Example: `SELECT overlay('Hello New York', 'Jersey', 11)` returns `Hello New Jersey`. [#66933](https://github.com/ClickHouse/ClickHouse/pull/66933) ([李扬](https://github.com/taiyang-li)).
* Add support for lightweight deletes in partition `DELETE FROM [db.]table [ON CLUSTER cluster] [IN PARTITION partition_expr] WHERE expr; ` [#67805](https://github.com/ClickHouse/ClickHouse/pull/67805) ([sunny](https://github.com/sunny19930321)).
* Implemented comparison for `Interval` data type values of different domains (such as seconds and minutes) so they are converting now to the least supertype. [#68057](https://github.com/ClickHouse/ClickHouse/pull/68057) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add `create_if_not_exists` setting to default to `IF NOT EXISTS` behavior during CREATE statements. [#68164](https://github.com/ClickHouse/ClickHouse/pull/68164) ([Peter Nguyen](https://github.com/petern48)).
* Makes it possible to read `Iceberg` tables in Azure and locally. [#68210](https://github.com/ClickHouse/ClickHouse/pull/68210) ([Daniil Ivanik](https://github.com/divanik)).
* Query cache entries can now be dropped by tag. For example, the query cache entry created by `SELECT 1 SETTINGS use_query_cache = true, query_cache_tag = 'abc'` can now be dropped by `SYSTEM DROP QUERY CACHE TAG 'abc'`. [#68477](https://github.com/ClickHouse/ClickHouse/pull/68477) ([Michał Tabaszewski](https://github.com/pinsvin00)).
* Add storage encryption for named collections. [#68615](https://github.com/ClickHouse/ClickHouse/pull/68615) ([Pablo Marcos](https://github.com/pamarcos)).
* Added `RIPEMD160` function, which computes the RIPEMD-160 cryptographic hash of a string. Example: `SELECT hex(RIPEMD160('The quick brown fox jumps over the lazy dog'))` returns `37F332F68DB77BD9D7EDD4969571AD671CF9DD3B`. [#68639](https://github.com/ClickHouse/ClickHouse/pull/68639) ([Dergousov Maxim](https://github.com/m7kss1)).
* Add virtual column `_headers` for the `URL` table engine. Closes [#65026](https://github.com/ClickHouse/ClickHouse/issues/65026). [#68867](https://github.com/ClickHouse/ClickHouse/pull/68867) ([flynn](https://github.com/ucasfl)).
* Add `system.projections` table to track available projections. [#68901](https://github.com/ClickHouse/ClickHouse/pull/68901) ([Jordi Villar](https://github.com/jrdi)).
* Add new function `arrayZipUnaligned` for Spark compatibility (it is named `arrays_zip` in Spark); unlike the original `arrayZip`, it allows arrays of unequal length. Example: `SELECT arrayZipUnaligned([1], [1, 2, 3])`. [#69030](https://github.com/ClickHouse/ClickHouse/pull/69030) ([李扬](https://github.com/taiyang-li)).
* Added `cp`/`mv` commands for the keeper client command line application which atomically copies/moves node. [#69034](https://github.com/ClickHouse/ClickHouse/pull/69034) ([Mikhail Artemenko](https://github.com/Michicosun)).
* Adds argument `scale` (default: `true`) to function `arrayAUC` which allows to skip the normalization step (issue [#69609](https://github.com/ClickHouse/ClickHouse/issues/69609)). [#69717](https://github.com/ClickHouse/ClickHouse/pull/69717) ([gabrielmcg44](https://github.com/gabrielmcg44)).

#### Experimental Feature
* Adds a setting `input_format_try_infer_variants` which allows `Variant` type to be inferred during schema inference for text formats when there is more than one possible type for column/array elements. [#63798](https://github.com/ClickHouse/ClickHouse/pull/63798) ([Shaun Struwig](https://github.com/Blargian)).
* Add aggregate functions `distinctDynamicTypes`/`distinctJSONPaths`/`distinctJSONPathsAndTypes` for better introspection of JSON column type content. [#68463](https://github.com/ClickHouse/ClickHouse/pull/68463) ([Kruglov Pavel](https://github.com/Avogar)).
* New algorithm to determine the unit of marks distribution between parallel replicas by a consistent hash. Different numbers of marks chosen for different read patterns to improve performance. [#68424](https://github.com/ClickHouse/ClickHouse/pull/68424) ([Nikita Taranov](https://github.com/nickitat)).
* Previously the algorithmic complexity of part deduplication logic in parallel replica announcement handling was O(n^2) which could take noticeable time for tables with many part (or partitions). This change makes the complexity O(n*log(n)). [#69596](https://github.com/ClickHouse/ClickHouse/pull/69596) ([Alexander Gololobov](https://github.com/davenger)).
* Refreshable materialized view improvements: append mode (`... REFRESH EVERY 1 MINUTE APPEND ...`) to add rows to existing table instead of overwriting the whole table, retries (disabled by default, configured in SETTINGS section of the query), `SYSTEM WAIT VIEW <name>` query that waits for the currently running refresh, some fixes. [#58934](https://github.com/ClickHouse/ClickHouse/pull/58934) ([Michael Kolupaev](https://github.com/al13n321)).
* Added `min_max`as a new type of (experimental) statistics. It supports estimating range predicates over numeric columns, e.g. `x < 100`. [#67013](https://github.com/ClickHouse/ClickHouse/pull/67013) ([JackyWoo](https://github.com/JackyWoo)).
* Improve castOrDefault from Variant/Dynamic columns so it works when inner types are not convertable at all. [#67150](https://github.com/ClickHouse/ClickHouse/pull/67150) ([Kruglov Pavel](https://github.com/Avogar)).
* Replication of subset of columns is now available through MaterializedPostgreSQL. Closes [#33748](https://github.com/ClickHouse/ClickHouse/issues/33748). [#69092](https://github.com/ClickHouse/ClickHouse/pull/69092) ([Kruglov Kirill](https://github.com/1on)).

#### Performance Improvement
* Implemented reading of required files only for Hive partitioning. [#68963](https://github.com/ClickHouse/ClickHouse/pull/68963) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Improve the JOIN performance by rearranging the right table by keys while the table keys are dense in the LEFT or INNER hash joins. [#60341](https://github.com/ClickHouse/ClickHouse/pull/60341) ([kevinyhzou](https://github.com/KevinyhZou)).
* Improve ALL JOIN performance by appending the list of rows lazily. [#63677](https://github.com/ClickHouse/ClickHouse/pull/63677) ([kevinyhzou](https://github.com/KevinyhZou)).
* Load filesystem cache metadata asynchronously during the boot process, in order to make restarts faster (controlled by setting `load_metadata_asynchronously`). [#65736](https://github.com/ClickHouse/ClickHouse/pull/65736) ([Daniel Pozo Escalona](https://github.com/danipozo)).
* Functions `array` and `map` were optimized to process certain common cases much faster. [#67707](https://github.com/ClickHouse/ClickHouse/pull/67707) ([李扬](https://github.com/taiyang-li)).
* Trivial optimize on ORC strings reading especially when a column contains no NULLs. [#67794](https://github.com/ClickHouse/ClickHouse/pull/67794) ([李扬](https://github.com/taiyang-li)).
* Improved overall performance of merges by reducing the overhead of scheduling steps of merges. [#68016](https://github.com/ClickHouse/ClickHouse/pull/68016) ([Anton Popov](https://github.com/CurtizJ)).
* Speed up requests to S3 when a profile is not set, credentials are not set, and IMDS is not available (for example, when you are querying a public bucket on a machine outside of a cloud). This closes [#52771](https://github.com/ClickHouse/ClickHouse/issues/52771). [#68082](https://github.com/ClickHouse/ClickHouse/pull/68082) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Devirtualize format reader in `RowInputFormatWithNamesAndTypes` for some performance improvement. [#68437](https://github.com/ClickHouse/ClickHouse/pull/68437) ([李扬](https://github.com/taiyang-li)).
* Add the parallel merge for `uniq` aggregate function when aggregating with a group by key to maximize the CPU utilization. [#68441](https://github.com/ClickHouse/ClickHouse/pull/68441) ([Jiebin Sun](https://github.com/jiebinn)).
* Add settings `output_format_orc_dictionary_key_size_threshold` to allow user to enable dict encoding for string column in `ORC` output format. It helps reduce the output `ORC` file size and improve reading performance significantly. [#68591](https://github.com/ClickHouse/ClickHouse/pull/68591) ([李扬](https://github.com/taiyang-li)).
* Introduce new Keeper request RemoveRecursive which removes node with all it's subtree. [#69332](https://github.com/ClickHouse/ClickHouse/pull/69332) ([Mikhail Artemenko](https://github.com/Michicosun)).
* Speedup insertion performance into a table with a vector similarity index by adding data to the vector index in parallel. [#69493](https://github.com/ClickHouse/ClickHouse/pull/69493) ([flynn](https://github.com/ucasfl)).
* Reduce memory usage of inserts to JSON by using adaptive write buffer size. A lot of files created by JSON column in wide part contains small amount of data and it doesn't make sense to allocate 1MB buffer for them. [#69272](https://github.com/ClickHouse/ClickHouse/pull/69272) ([Kruglov Pavel](https://github.com/Avogar)).
* Avoid returning a thread in the concurrent hash join threadpool to avoid query excessively spawn threads. [#69406](https://github.com/ClickHouse/ClickHouse/pull/69406) ([Duc Canh Le](https://github.com/canhld94)).

#### Improvement
* CREATE TABLE AS now copies PRIMARY KEY, ORDER BY, and similar clauses. Now it supports only for MergeTree family of table engines. [#69076](https://github.com/ClickHouse/ClickHouse/pull/69076) ([sakulali](https://github.com/sakulali)).
* Hardened parts of the codebase related to parsing of small entities. The following (minor) bugs were found and fixed: - if a `DeltaLake` table is partitioned by Bool, the partition value is always interpreted as false; - `ExternalDistributed` table was using only a single shard in the provided addresses; the value of `max_threads` setting and similar were printed as `'auto(N)'` instead of `auto(N)`. [#52503](https://github.com/ClickHouse/ClickHouse/pull/52503) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Use cgroup-specific metrics for CPU usage accounting instead of system-wide metrics. [#62003](https://github.com/ClickHouse/ClickHouse/pull/62003) ([Nikita Taranov](https://github.com/nickitat)).
* IO scheduling for remote S3 disks is now done on the level of HTTP socket streams (instead of the whole S3 requests) to resolve `bandwidth_limit` throttling issues. [#65182](https://github.com/ClickHouse/ClickHouse/pull/65182) ([Sergei Trifonov](https://github.com/serxa)).
* Functions `upperUTF8` and `lowerUTF8` were previously only able to uppercase / lowercase Cyrillic characters. This limitation is now removed and characters in arbitrary languages are uppercased/lowercased. Example: `SELECT upperUTF8('Süden')` now returns `SÜDEN`. [#65761](https://github.com/ClickHouse/ClickHouse/pull/65761) ([李扬](https://github.com/taiyang-li)).
* When lightweight delete happens on a table with projection(s), despite users have choices either throw an exception (by default) or drop the projection when the lightweight delete would happen, now there is the third option to still have lightweight delete and then rebuild projection(s). [#66169](https://github.com/ClickHouse/ClickHouse/pull/66169) ([jsc0218](https://github.com/jsc0218)).
* Two options (`dns_allow_resolve_names_to_ipv4` and `dns_allow_resolve_names_to_ipv6`) have been added, to allow block connections ip family. [#66895](https://github.com/ClickHouse/ClickHouse/pull/66895) ([MikhailBurdukov](https://github.com/MikhailBurdukov)).
* Make Ctrl-Z ignorance configurable (ignore_shell_suspend) in clickhouse-client. [#67134](https://github.com/ClickHouse/ClickHouse/pull/67134) ([Azat Khuzhin](https://github.com/azat)).
* Improve UTF-8 validation in JSON output formats. Ensures that valid JSON is generated in the case of certain byte sequences in the result data. [#67938](https://github.com/ClickHouse/ClickHouse/pull/67938) ([mwoenker](https://github.com/mwoenker)).
* Added profile events for merges and mutations for better introspection. [#68015](https://github.com/ClickHouse/ClickHouse/pull/68015) ([Anton Popov](https://github.com/CurtizJ)).
* ODBC: get http_max_tries from the server configuration. [#68128](https://github.com/ClickHouse/ClickHouse/pull/68128) ([Rodolphe Dugé de Bernonville](https://github.com/RodolpheDuge)).
* Add wildcard support for user identification in X.509 SubjectAltName extension. [#68236](https://github.com/ClickHouse/ClickHouse/pull/68236) ([Marco Vilas Boas](https://github.com/marco-vb)).
* Improve schema inference of date times. Now `DateTime64` used only when date time has fractional part, otherwise regular DateTime is used. Inference of Date/DateTime is more strict now, especially when `date_time_input_format='best_effort'` to avoid inferring date times from strings in corner cases. [#68382](https://github.com/ClickHouse/ClickHouse/pull/68382) ([Kruglov Pavel](https://github.com/Avogar)).
* Delete old code of named collections from dictionaries and substitute it to the new, which allows to use DDL created named collections in dictionaries. Closes [#60936](https://github.com/ClickHouse/ClickHouse/issues/60936), closes [#36890](https://github.com/ClickHouse/ClickHouse/issues/36890). [#68412](https://github.com/ClickHouse/ClickHouse/pull/68412) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Use HTTP/1.1 instead of HTTP/1.0 (set by default) for external HTTP authenticators. [#68456](https://github.com/ClickHouse/ClickHouse/pull/68456) ([Aleksei Filatov](https://github.com/aalexfvk)).
* Added a new set of metrics for thread pool introspection, providing deeper insights into thread pool performance and behavior. [#68674](https://github.com/ClickHouse/ClickHouse/pull/68674) ([filimonov](https://github.com/filimonov)).
* Support query parameters in async inserts with format `Values`. [#68741](https://github.com/ClickHouse/ClickHouse/pull/68741) ([Anton Popov](https://github.com/CurtizJ)).
* Support `Date32` on `dateTrunc` and `toStartOfInterval`. [#68874](https://github.com/ClickHouse/ClickHouse/pull/68874) ([LiuNeng](https://github.com/liuneng1994)).
* Add `plan_step_name` and `plan_step_description` columns to `system.processors_profile_log`. [#68954](https://github.com/ClickHouse/ClickHouse/pull/68954) ([Alexander Gololobov](https://github.com/davenger)).
* Support for the Spanish language in the embedded dictionaries. [#69035](https://github.com/ClickHouse/ClickHouse/pull/69035) ([Vasily Okunev](https://github.com/VOkunev)).
* Add CPU arch to the short fault information message. [#69037](https://github.com/ClickHouse/ClickHouse/pull/69037) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* Queries will fail faster if a new Keeper connection cannot be established during retries. [#69148](https://github.com/ClickHouse/ClickHouse/pull/69148) ([Raúl Marín](https://github.com/Algunenano)).
* Update Database Factory so it would be possible for user defined database engines to have arguments, settings and table overrides (similar to StorageFactory). [#69201](https://github.com/ClickHouse/ClickHouse/pull/69201) ([Барыкин Никита Олегович](https://github.com/NikBarykin)).
* Restore mode that replaces all external table engines and functions to the `Null` engine (`restore_replace_external_engines_to_null`, `restore_replace_external_table_functions_to_null` settings) was failing if table had SETTINGS. Now it removes settings from table definition in this case and allows to restore such tables. [#69253](https://github.com/ClickHouse/ClickHouse/pull/69253) ([Ilya Yatsishin](https://github.com/qoega)).
* CLICKHOUSE_PASSWORD is correctly escaped for XML in clickhouse image's entrypoint. [#69301](https://github.com/ClickHouse/ClickHouse/pull/69301) ([aohoyd](https://github.com/aohoyd)).
* Allow empty arguments for `arrayZip`/`arrayZipUnaligned`, as concat did in https://github.com/ClickHouse/ClickHouse/pull/65887. It is for spark compatiability in Gluten CH Backend. [#69576](https://github.com/ClickHouse/ClickHouse/pull/69576) ([李扬](https://github.com/taiyang-li)).
* Support more advanced SSL options for Keeper's internal communication (e.g. private keys with passphrase). [#69582](https://github.com/ClickHouse/ClickHouse/pull/69582) ([Antonio Andelic](https://github.com/antonio2368)).
* Index analysis can take noticeable time for big tables with many parts or partitions. This change should enable killing a heavy query at that stage. [#69606](https://github.com/ClickHouse/ClickHouse/pull/69606) ([Alexander Gololobov](https://github.com/davenger)).
* Masking sensitive info in `gcs` table function. [#69611](https://github.com/ClickHouse/ClickHouse/pull/69611) ([Vitaly Baranov](https://github.com/vitlibar)).
* Rebuild projection for merges that reduce number of rows. [#62364](https://github.com/ClickHouse/ClickHouse/pull/62364) ([cangyin](https://github.com/cangyin)).

#### Bug Fix (user-visible misbehavior in an official stable release)
* Fix attaching table when pg dbname contains "-" in the experimental and unsupported MaterializedPostgreSQL engine. [#62730](https://github.com/ClickHouse/ClickHouse/pull/62730) ([takakawa](https://github.com/takakawa)).
* Fixed error on generated columns in the experimental and totally unsupported MaterializedPostgreSQL engine when adnum ordering is broken [#63161](https://github.com/ClickHouse/ClickHouse/issues/63161). Fixed error on id column with nextval expression as default in the experimental and totally unsupported MaterializedPostgreSQL when there are generated columns in table. Fixed error on dropping publication with symbols except \[a-z1-9-\]. [#67664](https://github.com/ClickHouse/ClickHouse/pull/67664) ([Kruglov Kirill](https://github.com/1on)).
* Storage Join to support Nullable columns in the left table, close [#61247](https://github.com/ClickHouse/ClickHouse/issues/61247). [#66926](https://github.com/ClickHouse/ClickHouse/pull/66926) ([vdimir](https://github.com/vdimir)).
* Incorrect query result with parallel replicas (distribute queries as well) when `IN` operator contains conversion to Decimal(). The bug was introduced with the new analyzer. [#67234](https://github.com/ClickHouse/ClickHouse/pull/67234) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix the problem that alter modify order by causes inconsistent metadata. [#67436](https://github.com/ClickHouse/ClickHouse/pull/67436) ([iceFireser](https://github.com/iceFireser)).
* Fix the upper bound of the function `fromModifiedJulianDay`. It was supposed to be `9999-12-31` but was mistakenly set to `9999-01-01`. [#67583](https://github.com/ClickHouse/ClickHouse/pull/67583) ([PHO](https://github.com/depressed-pho)).
* Fix when the index is not at the beginning of the tuple during `IN` query. [#67626](https://github.com/ClickHouse/ClickHouse/pull/67626) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Fix expiration in `RoleCache`. [#67748](https://github.com/ClickHouse/ClickHouse/pull/67748) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix window view missing blocks due to slow flush to view. [#67983](https://github.com/ClickHouse/ClickHouse/pull/67983) ([Raúl Marín](https://github.com/Algunenano)).
* Fix MSan issue caused by incorrect date format. [#68105](https://github.com/ClickHouse/ClickHouse/pull/68105) ([JackyWoo](https://github.com/JackyWoo)).
* Fixed crash in Parquet filtering when data types in the file substantially differ from requested types (e.g. `... FROM file('a.parquet', Parquet, 'x String')`, but the file has `x Int64`). Without this fix, use `input_format_parquet_filter_push_down = 0` as a workaround. [#68131](https://github.com/ClickHouse/ClickHouse/pull/68131) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix crash in `lag`/`lead` which is introduced in [#67091](https://github.com/ClickHouse/ClickHouse/issues/67091). [#68262](https://github.com/ClickHouse/ClickHouse/pull/68262) ([lgbo](https://github.com/lgbo-ustc)).
* Try fix postgres crash when query is cancelled. [#68288](https://github.com/ClickHouse/ClickHouse/pull/68288) ([Kseniia Sumarokova](https://github.com/kssenii)).
* After https://github.com/ClickHouse/ClickHouse/pull/61984 `schema_inference_make_columns_nullable=0` still can make columns `Nullable` in Parquet/Arrow formats. The change was backward incompatible and users noticed the changes in the behaviour. This PR makes `schema_inference_make_columns_nullable=0` to work as before (no Nullable columns will be inferred) and introduces new value `auto` for this setting that will make columns `Nullable` only if data has information about nullability. [#68298](https://github.com/ClickHouse/ClickHouse/pull/68298) ([Kruglov Pavel](https://github.com/Avogar)).
* Fixes [#50868](https://github.com/ClickHouse/ClickHouse/issues/50868). Small DateTime64 constant values returned by a nested subquery inside a distributed query were wrongly transformed to Nulls, thus causing errors and possible incorrect query results. [#68323](https://github.com/ClickHouse/ClickHouse/pull/68323) ([Shankar](https://github.com/shiyer7474)).
* Fix missing sync replica mode in query `SYSTEM SYNC REPLICA`. [#68326](https://github.com/ClickHouse/ClickHouse/pull/68326) ([Duc Canh Le](https://github.com/canhld94)).
* Fix bug in key condition. [#68354](https://github.com/ClickHouse/ClickHouse/pull/68354) ([Han Fei](https://github.com/hanfei1991)).
* Fix crash on drop or rename a role that is used in LDAP external user directory. [#68355](https://github.com/ClickHouse/ClickHouse/pull/68355) ([Andrey Zvonov](https://github.com/zvonand)).
* Fix Progress column value of system.view_refreshes greater than 1 [#68377](https://github.com/ClickHouse/ClickHouse/issues/68377). [#68378](https://github.com/ClickHouse/ClickHouse/pull/68378) ([megao](https://github.com/jetgm)).
* Process regexp flags correctly. [#68389](https://github.com/ClickHouse/ClickHouse/pull/68389) ([Han Fei](https://github.com/hanfei1991)).
* PostgreSQL-style cast operator (`::`) works correctly even for SQL-style hex and binary string literals (e.g., `SELECT x'414243'::String`). This closes [#68324](https://github.com/ClickHouse/ClickHouse/issues/68324). [#68482](https://github.com/ClickHouse/ClickHouse/pull/68482) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Minor patch for https://github.com/ClickHouse/ClickHouse/pull/68131. [#68494](https://github.com/ClickHouse/ClickHouse/pull/68494) ([Chang chen](https://github.com/baibaichen)).
* Fix [#68239](https://github.com/ClickHouse/ClickHouse/issues/68239) SAMPLE n where n is an integer. [#68499](https://github.com/ClickHouse/ClickHouse/pull/68499) ([Denis Hananein](https://github.com/denis-hananein)).
* Fix bug in mann-whitney-utest when the size of two districutions are not equal. [#68556](https://github.com/ClickHouse/ClickHouse/pull/68556) ([Han Fei](https://github.com/hanfei1991)).
* After unexpected restart, fail to start replication of ReplicatedMergeTree due to abnormal handling of covered-by-broken part. [#68584](https://github.com/ClickHouse/ClickHouse/pull/68584) ([baolin](https://github.com/baolinhuang)).
* Fix `LOGICAL_ERROR`s when functions `sipHash64Keyed`, `sipHash128Keyed`, or `sipHash128ReferenceKeyed` are applied to empty arrays or tuples. [#68630](https://github.com/ClickHouse/ClickHouse/pull/68630) ([Robert Schulze](https://github.com/rschu1ze)).
* Full text index may filter out wrong columns when index multiple columns, it didn't reset row_id between different columns, the reproduce procedure is in tests/queries/0_stateless/03228_full_text_with_multi_col.sql. Without this. [#68644](https://github.com/ClickHouse/ClickHouse/pull/68644) ([siyuan](https://github.com/linkwk7)).
* Fix invalid character '\t' and '\n' in replica_name when creating a Replicated table, which causes incorrect parsing of 'source replica' in LogEntry. Mentioned in issue [#68640](https://github.com/ClickHouse/ClickHouse/issues/68640). [#68645](https://github.com/ClickHouse/ClickHouse/pull/68645) ([Zhigao Hong](https://github.com/zghong)).
* Added back virtual columns ` _table` and `_database` to distributed tables. They were available until version 24.3. [#68672](https://github.com/ClickHouse/ClickHouse/pull/68672) ([Anton Popov](https://github.com/CurtizJ)).
* Fix possible error `Size of permutation (0) is less than required (...)` during Variant column permutation. [#68681](https://github.com/ClickHouse/ClickHouse/pull/68681) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix possible error `DB::Exception: Block structure mismatch in joined block stream: different columns:` with new JSON column. [#68686](https://github.com/ClickHouse/ClickHouse/pull/68686) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix issue with materialized constant keys when hashing maps with arrays as keys in functions `sipHash(64/128)Keyed`. [#68731](https://github.com/ClickHouse/ClickHouse/pull/68731) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Make `ColumnsDescription::toString` format each column using the same `IAST::FormatState object`. This results in uniform columns metadata being written to disk and ZooKeeper. [#68733](https://github.com/ClickHouse/ClickHouse/pull/68733) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
* Fix merging of aggregated data for grouping sets. [#68744](https://github.com/ClickHouse/ClickHouse/pull/68744) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix logical error, when we create a replicated merge tree, alter a column and then execute modify statistics. [#68820](https://github.com/ClickHouse/ClickHouse/pull/68820) ([Han Fei](https://github.com/hanfei1991)).
* Fix resolving dynamic subcolumns from subqueries in analyzer. [#68824](https://github.com/ClickHouse/ClickHouse/pull/68824) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix complex types metadata parsing in DeltaLake. Closes [#68739](https://github.com/ClickHouse/ClickHouse/issues/68739). [#68836](https://github.com/ClickHouse/ClickHouse/pull/68836) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fixed asynchronous inserts in case when metadata of table is changed (by `ALTER ADD/MODIFY COLUMN` queries) after insert but before flush to the table. [#68837](https://github.com/ClickHouse/ClickHouse/pull/68837) ([Anton Popov](https://github.com/CurtizJ)).
* Fix unexpected exception when passing empty tuple in array. This fixes [#68618](https://github.com/ClickHouse/ClickHouse/issues/68618). [#68848](https://github.com/ClickHouse/ClickHouse/pull/68848) ([Amos Bird](https://github.com/amosbird)).
* Fix parsing pure metadata mutations commands. [#68935](https://github.com/ClickHouse/ClickHouse/pull/68935) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Fix possible wrong result during anyHeavy state merge. [#68950](https://github.com/ClickHouse/ClickHouse/pull/68950) ([Raúl Marín](https://github.com/Algunenano)).
* Fixed writing to Materialized Views with enabled setting `optimize_functions_to_subcolumns`. [#68951](https://github.com/ClickHouse/ClickHouse/pull/68951) ([Anton Popov](https://github.com/CurtizJ)).
* Don't use serializations cache in const Dynamic column methods. It could let to use-of-uninitialized value or even race condition during aggregations. [#68953](https://github.com/ClickHouse/ClickHouse/pull/68953) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix parsing error when null should be inserted as default in some cases during JSON type parsing. [#68955](https://github.com/ClickHouse/ClickHouse/pull/68955) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix `Content-Encoding` not sent in some compressed responses. [#64802](https://github.com/ClickHouse/ClickHouse/issues/64802). [#68975](https://github.com/ClickHouse/ClickHouse/pull/68975) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* There were cases when path was concatenated incorrectly and had the `//` part in it, solving this problem using path normalization. [#69066](https://github.com/ClickHouse/ClickHouse/pull/69066) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Fix logical error when we have empty async insert. [#69080](https://github.com/ClickHouse/ClickHouse/pull/69080) ([Han Fei](https://github.com/hanfei1991)).
* Fixed data race of progress indication in clickhouse-client during query canceling. [#69081](https://github.com/ClickHouse/ClickHouse/pull/69081) ([Sergei Trifonov](https://github.com/serxa)).
* Fix a bug that the vector similarity index (currently experimental) was not utilized when used with cosine distance as distance function. [#69090](https://github.com/ClickHouse/ClickHouse/pull/69090) ([flynn](https://github.com/ucasfl)).
* This change addresses an issue where attempting to create a Replicated database again after a server failure during the initial creation process could result in error. [#69102](https://github.com/ClickHouse/ClickHouse/pull/69102) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
* Don't infer Bool type from String in CSV when `input_format_csv_try_infer_numbers_from_strings = 1` because we don't allow reading bool values from strings. [#69109](https://github.com/ClickHouse/ClickHouse/pull/69109) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix explain ast insert queries parsing errors on client when `--multiquery` is enabled. [#69123](https://github.com/ClickHouse/ClickHouse/pull/69123) ([wxybear](https://github.com/wxybear)).
* `UNION` clause in subqueries wasn't handled correctly in queries with parallel replicas and lead to LOGICAL_ERROR `Duplicate announcement received for replica`. [#69146](https://github.com/ClickHouse/ClickHouse/pull/69146) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix propogating structure argument in s3Cluster. Previously the `DEFAULT` expression of the column could be lost when sending the query to the replicas in s3Cluster. [#69147](https://github.com/ClickHouse/ClickHouse/pull/69147) ([Kruglov Pavel](https://github.com/Avogar)).
* Respect format settings in Values format during conversion from expression to the destination type. [#69149](https://github.com/ClickHouse/ClickHouse/pull/69149) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix `clickhouse-client --queries-file` for readonly users (previously fails with `Cannot modify 'log_comment' setting in readonly mode`). [#69175](https://github.com/ClickHouse/ClickHouse/pull/69175) ([Azat Khuzhin](https://github.com/azat)).
* Fix data race in clickhouse-client when it's piped to a process that terminated early. [#69186](https://github.com/ClickHouse/ClickHouse/pull/69186) ([vdimir](https://github.com/vdimir)).
* Fix incorrect results of Fix uniq and GROUP BY for JSON/Dynamic types. [#69203](https://github.com/ClickHouse/ClickHouse/pull/69203) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix the INFILE format detection for asynchronous inserts. If the format is not explicitly defined in the FORMAT clause, it can be detected from the INFILE file extension. [#69237](https://github.com/ClickHouse/ClickHouse/pull/69237) ([Julia Kartseva](https://github.com/jkartseva)).
* After [this issue](https://github.com/ClickHouse/ClickHouse/pull/59946#issuecomment-1943653197) there are quite a few table replicas in production such that their `metadata_version` node value is both equal to `0` and is different from the respective table's `metadata` node version. This leads to `alter` queries failing on such replicas. [#69274](https://github.com/ClickHouse/ClickHouse/pull/69274) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
* Mark Dynamic type as not safe primary key type to avoid issues with Fields. [#69311](https://github.com/ClickHouse/ClickHouse/pull/69311) ([Kruglov Pavel](https://github.com/Avogar)).
* Improve restoring of access entities' dependencies. [#69346](https://github.com/ClickHouse/ClickHouse/pull/69346) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix undefined behavior when all connection attempts fail getting a connection for insertions. [#69390](https://github.com/ClickHouse/ClickHouse/pull/69390) ([Pablo Marcos](https://github.com/pamarcos)).
* Close [#69135](https://github.com/ClickHouse/ClickHouse/issues/69135). If we try to reuse joined data for `cross` join, but this could not happen in ClickHouse at present. It's better to keep `have_compressed` in `reuseJoinedData`. [#69404](https://github.com/ClickHouse/ClickHouse/pull/69404) ([lgbo](https://github.com/lgbo-ustc)).
* Make `materialize()` function return full column when parameter is a sparse column. [#69429](https://github.com/ClickHouse/ClickHouse/pull/69429) ([Alexander Gololobov](https://github.com/davenger)).
* Fixed a `LOGICAL_ERROR` with function `sqidDecode` ([#69450](https://github.com/ClickHouse/ClickHouse/issues/69450)). [#69451](https://github.com/ClickHouse/ClickHouse/pull/69451) ([Robert Schulze](https://github.com/rschu1ze)).
* Quick fix for s3queue problem on 24.6 or create query with database replicated. [#69454](https://github.com/ClickHouse/ClickHouse/pull/69454) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Fixed case when memory consumption was too high because of the squashing in `INSERT INTO ... SELECT` or `CREATE TABLE AS SELECT` queries. [#69469](https://github.com/ClickHouse/ClickHouse/pull/69469) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
|
||||
* Statements `SHOW COLUMNS` and `SHOW INDEX` now work properly if the table has dots in its name (see the sketch after this list). [#69514](https://github.com/ClickHouse/ClickHouse/pull/69514) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
|
||||
* Usage of the query cache for queries with an overflow mode != 'throw' is now disallowed. This prevents situations where potentially truncated and incorrect query results could be stored in the query cache. (issue [#67476](https://github.com/ClickHouse/ClickHouse/issues/67476)). [#69549](https://github.com/ClickHouse/ClickHouse/pull/69549) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||
* Keep original order of conditions during move to prewhere. Previously the order could change and it could lead to failing queries when the order is important. [#69560](https://github.com/ClickHouse/ClickHouse/pull/69560) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix Keeper multi-request preprocessing after ZNOAUTH error. [#69627](https://github.com/ClickHouse/ClickHouse/pull/69627) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix METADATA_MISMATCH that might have happened due to TTL with a WHERE clause in DatabaseReplicated when creating a new replica. [#69736](https://github.com/ClickHouse/ClickHouse/pull/69736) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||
* Fix `StorageS3(Azure)Queue` settings `tracked_file_ttl_sec`. We wrote it to keeper with key `tracked_file_ttl_sec`, but read as `tracked_files_ttl_sec`, which was a typo. [#69742](https://github.com/ClickHouse/ClickHouse/pull/69742) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Use `tryConvertFieldToType` in `getHyperrectangleForRowGroup`. [#69745](https://github.com/ClickHouse/ClickHouse/pull/69745) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
|
||||
* Revert "Fix prewhere without columns and without adaptive index granularity (almost w/o anything)"'. Due to the reverted changes some errors might happen when reading data parts produced by old CH releases (presumably 2021 or older). [#68897](https://github.com/ClickHouse/ClickHouse/pull/68897) ([Alexander Gololobov](https://github.com/davenger)).
### <a id="248"></a> ClickHouse release 24.8 LTS, 2024-08-20
|
||||
|
||||
#### Backward Incompatible Change
|
||||
* `clickhouse-client` and `clickhouse-local` now default to multi-query mode (instead of single-query mode). As an example, `clickhouse-client -q "SELECT 1; SELECT 2"` now works, whereas users previously had to add `--multiquery` (or `-n`). The `--multiquery/-n` switch became obsolete. INSERT queries in multi-query statements are treated specially based on their FORMAT clause: If the FORMAT is `VALUES` (the most common case), the end of the INSERT statement is represented by a trailing semicolon `;` at the end of the query. For all other FORMATs (e.g. `CSV` or `JSONEachRow`), the end of the INSERT statement is represented by two newlines `\n\n` at the end of the query. [#63898](https://github.com/ClickHouse/ClickHouse/pull/63898) ([FFish](https://github.com/wxybear)).
|
||||
* In previous versions, it was possible to use an alternative syntax for `LowCardinality` data types by appending `WithDictionary` to the name of the data type. It was an initial working implementation, and it was never documented or exposed to the public. Now, it is deprecated. If you have used this syntax, you have to ALTER your tables and rename the data types to `LowCardinality`. [#66842](https://github.com/ClickHouse/ClickHouse/pull/66842) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Fix logical errors with storage `Buffer` used with distributed destination table. It's a backward incompatible change: queries using `Buffer` with a distributed destination table may stop working if the table appears more than once in the query (e.g., in a self-join). [#67015](https://github.com/ClickHouse/ClickHouse/pull/67015) ([vdimir](https://github.com/vdimir)).
|
||||
* In previous versions, calling functions for random distributions based on the Gamma function (such as Chi-Squared, Student, Fisher) with negative arguments close to zero led to a long computation or an infinite loop. In the new version, calling these functions with zero or negative arguments will produce an exception. This closes [#67297](https://github.com/ClickHouse/ClickHouse/issues/67297). [#67326](https://github.com/ClickHouse/ClickHouse/pull/67326) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* The system table `text_log` is enabled by default. This is fully compatible with previous versions, but you may notice subtly increased disk usage on the local disk (this system table takes a tiny amount of disk space). [#67428](https://github.com/ClickHouse/ClickHouse/pull/67428) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* In previous versions, `arrayWithConstant` can be slow if asked to generate very large arrays. In the new version, it is limited to 1 GB per array. This closes [#32754](https://github.com/ClickHouse/ClickHouse/issues/32754). [#67741](https://github.com/ClickHouse/ClickHouse/pull/67741) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Fix REPLACE modifier formatting (forbid omitting brackets). [#67774](https://github.com/ClickHouse/ClickHouse/pull/67774) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Backported in [#68349](https://github.com/ClickHouse/ClickHouse/issues/68349): Reimplement `Dynamic` type. Now when the limit of dynamic data types is reached new types are not casted to String but stored in a special data structure in binary format with binary encoded data type. Now any type ever inserted into `Dynamic` column can be read from it as subcolumn. [#68132](https://github.com/ClickHouse/ClickHouse/pull/68132) ([Kruglov Pavel](https://github.com/Avogar)).
#### New Feature
|
||||
* Added a new `MergeTree` setting `deduplicate_merge_projection_mode` to control the projections during merges (for specific engines) and `OPTIMIZE DEDUPLICATE` query. Supported options: `throw` (throw an exception in case the projection is not fully supported for *MergeTree engine), `drop` (remove projection during merge if it can't be merged itself consistently) and `rebuild` (rebuild projection from scratch, which is a heavy operation). [#66672](https://github.com/ClickHouse/ClickHouse/pull/66672) ([jsc0218](https://github.com/jsc0218)).
|
||||
* Add `_etag` virtual column for S3 table engine. Fixes [#65312](https://github.com/ClickHouse/ClickHouse/issues/65312). [#65386](https://github.com/ClickHouse/ClickHouse/pull/65386) ([skyoct](https://github.com/skyoct)).
|
||||
* Added a tagging (namespace) mechanism for the query cache. The same queries with different tags are considered different by the query cache. Example: `SELECT 1 SETTINGS use_query_cache = 1, query_cache_tag = 'abc'` and `SELECT 1 SETTINGS use_query_cache = 1, query_cache_tag = 'def'` now create different query cache entries (see the sketch after this list). [#68235](https://github.com/ClickHouse/ClickHouse/pull/68235) ([sakulali](https://github.com/sakulali)).
|
||||
* Support more variants of JOIN strictness (`LEFT/RIGHT SEMI/ANTI/ANY JOIN`) with inequality conditions which involve columns from both left and right table. e.g. `t1.y < t2.y` (see the setting `allow_experimental_join_condition`). [#64281](https://github.com/ClickHouse/ClickHouse/pull/64281) ([lgbo](https://github.com/lgbo-ustc)).
|
||||
* Interpret Hive-style partitioning for different engines (`File`, `URL`, `S3`, `AzureBlobStorage`, `HDFS`). Hive-style partitioning organizes data into partitioned sub-directories, making it efficient to query and manage large datasets. Currently, it only creates virtual columns with the appropriate name and data. The follow-up PR will introduce the appropriate data filtering (performance speedup). [#65997](https://github.com/ClickHouse/ClickHouse/pull/65997) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
|
||||
* Add function `printf` for Spark compatibility (but you can use the existing `format` function); see the sketch after this list. [#66257](https://github.com/ClickHouse/ClickHouse/pull/66257) ([李扬](https://github.com/taiyang-li)).
|
||||
* Add options `restore_replace_external_engines_to_null` and `restore_replace_external_table_functions_to_null` to replace external engines and table_engines to `Null` engine that can be useful for testing. It should work for RESTORE and explicit table creation. [#66536](https://github.com/ClickHouse/ClickHouse/pull/66536) ([Ilya Yatsishin](https://github.com/qoega)).
|
||||
* Added support for reading `MULTILINESTRING` geometry in `WKT` format using function `readWKTLineString`. [#67647](https://github.com/ClickHouse/ClickHouse/pull/67647) ([Jacob Reckhard](https://github.com/jacobrec)).
|
||||
* Add a new table function `fuzzQuery`. This function allows the modification of a given query string with random variations. Example: `SELECT query FROM fuzzQuery('SELECT 1') LIMIT 5;`. [#67655](https://github.com/ClickHouse/ClickHouse/pull/67655) ([pufit](https://github.com/pufit)).
|
||||
* Add a query `ALTER TABLE ... DROP DETACHED PARTITION ALL` to drop all detached partitions (see the sketch after this list). [#67885](https://github.com/ClickHouse/ClickHouse/pull/67885) ([Duc Canh Le](https://github.com/canhld94)).
|
||||
* Add the `rows_before_aggregation_at_least` statistic to the query response when a new setting, `rows_before_aggregation` is enabled. This statistic represents the number of rows read before aggregation. In the context of a distributed query, when using the `group by` or `max` aggregation function without a `limit`, `rows_before_aggregation_at_least` can reflect the number of rows hit by the query. [#66084](https://github.com/ClickHouse/ClickHouse/pull/66084) ([morning-color](https://github.com/morning-color)).
|
||||
* Support `OPTIMIZE` query on `Join` tables to reduce their memory footprint. [#67883](https://github.com/ClickHouse/ClickHouse/pull/67883) ([Duc Canh Le](https://github.com/canhld94)).
|
||||
* Allow running a query instantly in the Play UI if you add `&run=1` to the URL. [#66457](https://github.com/ClickHouse/ClickHouse/pull/66457) ([Aleksandr Musorin](https://github.com/AVMusorin)).
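A few usage sketches for the items above. Table names and literals are hypothetical, and the `printf` format specifiers are assumed to follow the C/Spark-style conventions that the entry describes:

```sql
-- Query cache tags: the same query text with different tags produces separate cache entries.
SELECT 1 SETTINGS use_query_cache = 1, query_cache_tag = 'abc';
SELECT 1 SETTINGS use_query_cache = 1, query_cache_tag = 'def';

-- printf for Spark compatibility (C-style specifiers assumed).
SELECT printf('%d rows loaded into %s', 42, 'events');

-- Drop all detached partitions of a (hypothetical) table in one statement.
ALTER TABLE events DROP DETACHED PARTITION ALL;
```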
#### Experimental Feature
|
||||
* Implement a new `JSON` data type (a minimal usage sketch appears after this list). [#66444](https://github.com/ClickHouse/ClickHouse/pull/66444) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Add the new `TimeSeries` table engine. [#64183](https://github.com/ClickHouse/ClickHouse/pull/64183) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||
* Add new experimental `Kafka` storage engine to store offsets in Keeper instead of relying on committing them to Kafka. It makes the commit to ClickHouse tables atomic with regard to consumption from the queue. [#57625](https://github.com/ClickHouse/ClickHouse/pull/57625) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
|
||||
* Use adaptive read task size calculation method (adaptive meaning it depends on read column sizes) for parallel replicas. [#60377](https://github.com/ClickHouse/ClickHouse/pull/60377) ([Nikita Taranov](https://github.com/nickitat)).
|
||||
* Added statistics type `count_min` (count-min sketches) which provides selectivity estimations for equality predicates like `col = 'val'`. Supported data types are string, date, datetime and numeric types. [#65521](https://github.com/ClickHouse/ClickHouse/pull/65521) ([JackyWoo](https://github.com/JackyWoo)).
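A minimal sketch of the new `JSON` type described above. The setting name `allow_experimental_json_type` and the table are illustrative assumptions; the feature is experimental and details may differ in your build:

```sql
SET allow_experimental_json_type = 1;  -- assumed gate for the experimental type

CREATE TABLE json_demo (data JSON) ENGINE = MergeTree ORDER BY tuple();

INSERT INTO json_demo VALUES ('{"user": {"id": 1, "name": "alice"}, "tags": ["a", "b"]}');

-- JSON paths can be read back as subcolumns.
SELECT data.user.name, data.tags FROM json_demo;
```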
#### Performance Improvement
|
||||
* Setting `optimize_functions_to_subcolumns` is enabled by default. [#68053](https://github.com/ClickHouse/ClickHouse/pull/68053) ([Anton Popov](https://github.com/CurtizJ)).
|
||||
* Store the `plain_rewritable` disk directory metadata in `__meta` layout, separately from the merge tree data in the object storage. Move the `plain_rewritable` disk to a flat directory structure. [#65751](https://github.com/ClickHouse/ClickHouse/pull/65751) ([Julia Kartseva](https://github.com/jkartseva)).
|
||||
* Improve columns squashing (an operation happening in INSERT queries) for `String`/`Array`/`Map`/`Variant`/`Dynamic` types by reserving required memory in advance for all subcolumns. [#67043](https://github.com/ClickHouse/ClickHouse/pull/67043) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Speed up `SYSTEM FLUSH LOGS` and flush logs on shutdown. [#67472](https://github.com/ClickHouse/ClickHouse/pull/67472) ([Sema Checherinda](https://github.com/CheSema)).
|
||||
* Improved overall performance of merges by reducing the overhead of the scheduling steps of merges. [#68016](https://github.com/ClickHouse/ClickHouse/pull/68016) ([Anton Popov](https://github.com/CurtizJ)).
|
||||
* Speed up table removal for the `DROP DATABASE` query; increased the default value for `database_catalog_drop_table_concurrency` to 16. [#67228](https://github.com/ClickHouse/ClickHouse/pull/67228) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||
* Avoid allocating too much capacity for array column while writing ORC. Performance speeds up 15% for an Array column. [#67879](https://github.com/ClickHouse/ClickHouse/pull/67879) ([李扬](https://github.com/taiyang-li)).
|
||||
* Speed up mutations for non-replicated MergeTree significantly [#66911](https://github.com/ClickHouse/ClickHouse/pull/66911) [#66909](https://github.com/ClickHouse/ClickHouse/pull/66909) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
#### Improvement
|
||||
* Setting `allow_experimental_analyzer` is renamed to `enable_analyzer`. The old name is preserved in a form of an alias. This signifies that Analyzer is no longer in beta and is fully promoted to production. [#66438](https://github.com/ClickHouse/ClickHouse/pull/66438) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||
* Improve schema inference of date times. Now DateTime64 is used only when the date time has a fractional part; otherwise regular DateTime is used. Inference of Date/DateTime is more strict now, especially when `date_time_input_format='best_effort'`, to avoid inferring date times from strings in corner cases. [#68382](https://github.com/ClickHouse/ClickHouse/pull/68382) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* ClickHouse server now supports the new setting `max_keep_alive_requests`. For keep-alive HTTP connections to the server it works in tandem with `keep_alive_timeout`: if the idle timeout has not expired but more than `max_keep_alive_requests` requests have already been served through the given connection, it will be closed by the server. [#61793](https://github.com/ClickHouse/ClickHouse/pull/61793) ([Nikita Taranov](https://github.com/nickitat)).
|
||||
* Various improvements in the advanced dashboard. This closes [#67697](https://github.com/ClickHouse/ClickHouse/issues/67697). This closes [#63407](https://github.com/ClickHouse/ClickHouse/issues/63407). This closes [#51129](https://github.com/ClickHouse/ClickHouse/issues/51129). This closes [#61204](https://github.com/ClickHouse/ClickHouse/issues/61204). [#67701](https://github.com/ClickHouse/ClickHouse/pull/67701) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Do not require a grant for REMOTE when creating a Distributed table: a grant for the Distributed engine is enough. [#65419](https://github.com/ClickHouse/ClickHouse/pull/65419) ([jsc0218](https://github.com/jsc0218)).
|
||||
* Do not pass logs for keeper explicitly in the Docker image to allow overriding. [#65564](https://github.com/ClickHouse/ClickHouse/pull/65564) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Introduced the `use_same_password_for_base_backup` setting for `BACKUP` and `RESTORE` queries, allowing the creation and restoration of incremental backups to/from password-protected archives. [#66214](https://github.com/ClickHouse/ClickHouse/pull/66214) ([Samuele](https://github.com/sguerrini97)).
|
||||
* Ignore `async_load_databases` for `ATTACH` query (previously it was possible for ATTACH to return before the tables had been attached). [#66240](https://github.com/ClickHouse/ClickHouse/pull/66240) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Added logs and metrics for rejected connections (where there are not enough resources). [#66410](https://github.com/ClickHouse/ClickHouse/pull/66410) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||
* Support proper `UUID` type for MongoDB engine. [#66671](https://github.com/ClickHouse/ClickHouse/pull/66671) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Add replication lag and recovery time metrics. [#66703](https://github.com/ClickHouse/ClickHouse/pull/66703) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
|
||||
* Add `DiskS3NoSuchKeyErrors` metric. [#66704](https://github.com/ClickHouse/ClickHouse/pull/66704) ([Miсhael Stetsyuk](https://github.com/mstetsyuk)).
|
||||
* Ensure the `COMMENT` clause works for all table engines. [#66832](https://github.com/ClickHouse/ClickHouse/pull/66832) ([Joe Lynch](https://github.com/joelynch)).
|
||||
* Function `mapFromArrays` now accepts `Map(K, V)` as first argument, for example: `SELECT mapFromArrays(map('a', 4, 'b', 4), ['aa', 'bb'])` now works and returns `{('a',4):'aa',('b',4):'bb'}`. Also, if the 1st argument is an Array, it can now also be of type `Array(Nullable(T))` or `Array(LowCardinality(Nullable(T)))` as long as the actual array values are not `NULL`. [#67103](https://github.com/ClickHouse/ClickHouse/pull/67103) ([李扬](https://github.com/taiyang-li)).
|
||||
* Read configuration for `clickhouse-local` from `~/.clickhouse-local`. [#67135](https://github.com/ClickHouse/ClickHouse/pull/67135) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Rename setting `input_format_orc_read_use_writer_time_zone` to `input_format_orc_reader_timezone` and allow the user to set the reader timezone. [#67175](https://github.com/ClickHouse/ClickHouse/pull/67175) ([kevinyhzou](https://github.com/KevinyhZou)).
|
||||
* Decrease level of the `Socket is not connected` error when HTTP connection immediately reset by peer after connecting, close [#34218](https://github.com/ClickHouse/ClickHouse/issues/34218). [#67177](https://github.com/ClickHouse/ClickHouse/pull/67177) ([vdimir](https://github.com/vdimir)).
|
||||
* Add the ability to load dashboards for `system.dashboards` from config (once set, they override the default dashboard preset). [#67232](https://github.com/ClickHouse/ClickHouse/pull/67232) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* The window functions in SQL are traditionally in snake_case. ClickHouse uses camelCase, so new aliases `denseRank()` and `percentRank()` have been created. These new functions can be called exactly like the original `dense_rank()` and `percent_rank()` functions. Both snake_case and camelCase syntaxes remain usable (see the sketch after this list). A new test for each of the functions has been added as well. This closes [#67042](https://github.com/ClickHouse/ClickHouse/issues/67042). [#67334](https://github.com/ClickHouse/ClickHouse/pull/67334) ([Peter Nguyen](https://github.com/petern48)).
|
||||
* Autodetect the configuration file format if it is not `.xml`, `.yml` or `.yaml`. If the file begins with `<`, it might be XML; otherwise it might be YAML. It is useful when providing a configuration file from a pipe: `clickhouse-server --config-file <(echo "hello: world")`. [#67391](https://github.com/ClickHouse/ClickHouse/pull/67391) ([sakulali](https://github.com/sakulali)).
|
||||
* Functions `parseDateTime` and `parseDateTimeInJodaSyntax` now treat their format parameter as optional. If it is not specified, format strings `%Y-%m-%d %H:%i:%s` and `yyyy-MM-dd HH:mm:ss` are assumed. Example: `SELECT parseDateTime('2021-01-04 23:12:34')` now returns DateTime value `2021-01-04 23:12:34` (previously, this threw an exception); see the sketch after this list. [#67399](https://github.com/ClickHouse/ClickHouse/pull/67399) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||
* Automatically retry Keeper requests in KeeperMap if they happen because of timeout or connection loss. [#67448](https://github.com/ClickHouse/ClickHouse/pull/67448) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Add `-no-pie` to Aarch64 Linux builds to allow proper introspection and symbolizing of stacktraces after a ClickHouse restart. [#67916](https://github.com/ClickHouse/ClickHouse/pull/67916) ([filimonov](https://github.com/filimonov)).
|
||||
* Added profile events for merges and mutations for better introspection. [#68015](https://github.com/ClickHouse/ClickHouse/pull/68015) ([Anton Popov](https://github.com/CurtizJ)).
|
||||
* Remove unnecessary logs for non-replicated `MergeTree`. [#68238](https://github.com/ClickHouse/ClickHouse/pull/68238) ([Daniil Ivanik](https://github.com/divanik)).
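Two small sketches for the window-function aliases and the optional format parameter described above; both follow directly from the examples in those entries:

```sql
-- camelCase aliases behave exactly like dense_rank() and percent_rank().
SELECT
    number,
    denseRank()   OVER (ORDER BY number % 3) AS dr,
    percentRank() OVER (ORDER BY number % 3) AS pr
FROM numbers(6);

-- The format parameter is now optional; '%Y-%m-%d %H:%i:%s' is assumed by default.
SELECT parseDateTime('2021-01-04 23:12:34');
```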
#### Build/Testing/Packaging Improvement
|
||||
* Integration tests flaky check will now run each test case multiple times to find more issues in tests and make them more reliable. It uses the `pytest-repeat` library to run a test case multiple times in the same environment. It is important to clean up tables and other entities at the end of a test case for it to pass. Repeating works much faster than several pytest runs as it starts the necessary containers only once. [#66986](https://github.com/ClickHouse/ClickHouse/pull/66986) ([Ilya Yatsishin](https://github.com/qoega)).
|
||||
* Unblock the usage of CLion with ClickHouse. In previous versions, CLion froze for a minute on every keypress. This closes [#66994](https://github.com/ClickHouse/ClickHouse/issues/66994). [#66995](https://github.com/ClickHouse/ClickHouse/pull/66995) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* getauxval: avoid a crash under a sanitizer re-exec due to high ASLR entropy in newer Linux kernels. [#67081](https://github.com/ClickHouse/ClickHouse/pull/67081) ([Raúl Marín](https://github.com/Algunenano)).
|
||||
* Some parts of client code are extracted to a single file and highest possible level optimization is applied to them even for debug builds. This closes: [#65745](https://github.com/ClickHouse/ClickHouse/issues/65745). [#67215](https://github.com/ClickHouse/ClickHouse/pull/67215) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
#### Bug Fix
|
||||
* Only relevant to the experimental Variant data type. Fix crash with Variant + AggregateFunction type. [#67122](https://github.com/ClickHouse/ClickHouse/pull/67122) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix crash in DistributedAsyncInsert when connection is empty. [#67219](https://github.com/ClickHouse/ClickHouse/pull/67219) ([Pablo Marcos](https://github.com/pamarcos)).
|
||||
* Fix crash of `uniq` and `uniqTheta` with `tuple()` argument. Closes [#67303](https://github.com/ClickHouse/ClickHouse/issues/67303). [#67306](https://github.com/ClickHouse/ClickHouse/pull/67306) ([flynn](https://github.com/ucasfl)).
|
||||
* Fixes [#66026](https://github.com/ClickHouse/ClickHouse/issues/66026). Avoid unresolved table function arguments traversal in `ReplaceTableNodeToDummyVisitor`. [#67522](https://github.com/ClickHouse/ClickHouse/pull/67522) ([Dmitry Novik](https://github.com/novikd)).
|
||||
* Fix potential stack overflow in `JSONMergePatch` function. Renamed this function from `jsonMergePatch` to `JSONMergePatch` because the previous name was wrong. The previous name is still kept for compatibility. Improved diagnostic of errors in the function. This closes [#67304](https://github.com/ClickHouse/ClickHouse/issues/67304). [#67756](https://github.com/ClickHouse/ClickHouse/pull/67756) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Fixed a NULL pointer dereference, triggered by a specially crafted query, that crashed the server via hopEnd, hopStart, tumbleEnd, and tumbleStart. [#68098](https://github.com/ClickHouse/ClickHouse/pull/68098) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
|
||||
* Fixed `Not-ready Set` in some system tables when filtering using subqueries. [#66018](https://github.com/ClickHouse/ClickHouse/pull/66018) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||
* Fixed reading of subcolumns after `ALTER ADD COLUMN` query. [#66243](https://github.com/ClickHouse/ClickHouse/pull/66243) ([Anton Popov](https://github.com/CurtizJ)).
|
||||
* Fix boolean literals in query sent to external database (for engines like `PostgreSQL`). [#66282](https://github.com/ClickHouse/ClickHouse/pull/66282) ([vdimir](https://github.com/vdimir)).
|
||||
* Fix formatting of query with aliased JOIN ON expression, e.g. `... JOIN t2 ON (x = y) AS e ORDER BY x` should be formatted as `... JOIN t2 ON ((x = y) AS e) ORDER BY x`. [#66312](https://github.com/ClickHouse/ClickHouse/pull/66312) ([vdimir](https://github.com/vdimir)).
|
||||
* Fix cluster() for inter-server secret (preserve initial user as before). [#66364](https://github.com/ClickHouse/ClickHouse/pull/66364) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Fix possible runtime error while converting Array field with nulls to Array(Variant). [#66727](https://github.com/ClickHouse/ClickHouse/pull/66727) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix for occasional deadlock in Context::getDDLWorker. [#66843](https://github.com/ClickHouse/ClickHouse/pull/66843) ([Alexander Gololobov](https://github.com/davenger)).
|
||||
* Fix creating KeeperMap table after an incomplete drop. [#66865](https://github.com/ClickHouse/ClickHouse/pull/66865) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix broken part error while restoring to a `s3_plain_rewritable` disk. [#66881](https://github.com/ClickHouse/ClickHouse/pull/66881) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||
* In rare cases ClickHouse could consider parts as broken because of some unexpected projections on disk. Now it's fixed. [#66898](https://github.com/ClickHouse/ClickHouse/pull/66898) ([alesapin](https://github.com/alesapin)).
|
||||
* Fix invalid format detection in schema inference that could lead to logical error Format {} doesn't support schema inference. [#66899](https://github.com/ClickHouse/ClickHouse/pull/66899) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix possible deadlock on query cancel with parallel replicas. [#66905](https://github.com/ClickHouse/ClickHouse/pull/66905) ([Nikita Taranov](https://github.com/nickitat)).
|
||||
* Forbid `CREATE ... AS SELECT` even when `database_replicated_allow_heavy_create` is set. It was unconditionally forbidden in 23.12 and accidentally allowed under the setting in unreleased 24.7. [#66980](https://github.com/ClickHouse/ClickHouse/pull/66980) ([vdimir](https://github.com/vdimir)).
|
||||
* Reading from the `numbers` could wrongly throw an exception when the `max_rows_to_read` limit was set. This closes [#66992](https://github.com/ClickHouse/ClickHouse/issues/66992). [#66996](https://github.com/ClickHouse/ClickHouse/pull/66996) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Add proper type conversion to lagInFrame and leadInFrame window functions - fixes msan test. [#67091](https://github.com/ClickHouse/ClickHouse/pull/67091) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||
* TRUNCATE DATABASE used to stop replication as if it was a DROP DATABASE query, it's fixed. [#67129](https://github.com/ClickHouse/ClickHouse/pull/67129) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||
* Use a separate client context in `clickhouse-local`. [#67133](https://github.com/ClickHouse/ClickHouse/pull/67133) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||
* Fix error `Cannot convert column because it is non constant in source stream but must be constant in result.` for a query that reads from the `Merge` table over the `Distributed` table with one shard. [#67146](https://github.com/ClickHouse/ClickHouse/pull/67146) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||
* Correct behavior of `ORDER BY all` with disabled `enable_order_by_all` and parallel replicas (distributed queries as well). [#67153](https://github.com/ClickHouse/ClickHouse/pull/67153) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||
* Fix wrong usage of input_format_max_bytes_to_read_for_schema_inference in schema cache. [#67157](https://github.com/ClickHouse/ClickHouse/pull/67157) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix the memory leak for count distinct when an exception is thrown during GROUP BY on a single nullable key. [#67171](https://github.com/ClickHouse/ClickHouse/pull/67171) ([Jet He](https://github.com/compasses)).
|
||||
* Fix an error in optimization which converts OUTER JOIN to INNER JOIN. This closes [#67156](https://github.com/ClickHouse/ClickHouse/issues/67156). This closes [#66447](https://github.com/ClickHouse/ClickHouse/issues/66447). The bug was introduced in https://github.com/ClickHouse/ClickHouse/pull/62907. [#67178](https://github.com/ClickHouse/ClickHouse/pull/67178) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||
* Fix error `Conversion from AggregateFunction(name, Type) to AggregateFunction(name, Nullable(Type)) is not supported`. The bug was caused by the `optimize_rewrite_aggregate_function_with_if` optimization. Fixes [#67112](https://github.com/ClickHouse/ClickHouse/issues/67112). [#67229](https://github.com/ClickHouse/ClickHouse/pull/67229) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||
* Fix hung query when using empty tuple as lhs of function IN. [#67295](https://github.com/ClickHouse/ClickHouse/pull/67295) ([Duc Canh Le](https://github.com/canhld94)).
|
||||
* It was possible to create a very deep nested JSON data that triggered stack overflow while skipping unknown fields. This closes [#67292](https://github.com/ClickHouse/ClickHouse/issues/67292). [#67324](https://github.com/ClickHouse/ClickHouse/pull/67324) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Fix attaching ReplicatedMergeTree table after exception during startup. [#67360](https://github.com/ClickHouse/ClickHouse/pull/67360) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix segfault caused by incorrectly detaching from thread group in `Aggregator`. [#67385](https://github.com/ClickHouse/ClickHouse/pull/67385) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix one more case when a non-deterministic function is specified in PK. [#67395](https://github.com/ClickHouse/ClickHouse/pull/67395) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||
* Fixed `bloom_filter` index breaking queries with mildly weird conditions like `(k=2)=(k=2)` or `has([1,2,3], k)`. [#67423](https://github.com/ClickHouse/ClickHouse/pull/67423) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||
* Correctly parse file name/URI containing `::` if it's not an archive. [#67433](https://github.com/ClickHouse/ClickHouse/pull/67433) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix wait for tasks in ~WriteBufferFromS3 in case WriteBuffer was cancelled. [#67459](https://github.com/ClickHouse/ClickHouse/pull/67459) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Protect temporary part directories from removing during RESTORE. [#67491](https://github.com/ClickHouse/ClickHouse/pull/67491) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||
* Fix execution of nested short-circuit functions. [#67520](https://github.com/ClickHouse/ClickHouse/pull/67520) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix `Logical error: Expected the argument №N of type T to have X rows, but it has 0`. The error could happen in a remote query with constant expression in `GROUP BY` (with a new analyzer). [#67536](https://github.com/ClickHouse/ClickHouse/pull/67536) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||
* Fix join on tuple with NULLs: Some queries with the new analyzer and `NULL` inside the tuple in the `JOIN ON` section returned incorrect results. [#67538](https://github.com/ClickHouse/ClickHouse/pull/67538) ([vdimir](https://github.com/vdimir)).
|
||||
* Fix redundant reschedule of FileCache::freeSpaceRatioKeepingThreadFunc() in case of full non-evictable cache. [#67540](https://github.com/ClickHouse/ClickHouse/pull/67540) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Fix inserting into stream like engines (Kafka, RabbitMQ, NATS) through HTTP interface. [#67554](https://github.com/ClickHouse/ClickHouse/pull/67554) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
|
||||
* Fix for function `toStartOfWeek` which returned the wrong result with a small `DateTime64` value. [#67558](https://github.com/ClickHouse/ClickHouse/pull/67558) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
|
||||
* Fix creation of view with recursive CTE. [#67587](https://github.com/ClickHouse/ClickHouse/pull/67587) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||
* Fix `Logical error: 'file_offset_of_buffer_end <= read_until_position'` in filesystem cache. Closes [#57508](https://github.com/ClickHouse/ClickHouse/issues/57508). [#67623](https://github.com/ClickHouse/ClickHouse/pull/67623) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Fixes [#62282](https://github.com/ClickHouse/ClickHouse/issues/62282). Removed the call to `convertFieldToString()` and added datatype specific serialization code. Parameterized view substitution was broken for multiple datatypes when parameter value was a function or expression returning datatype instance. [#67654](https://github.com/ClickHouse/ClickHouse/pull/67654) ([Shankar](https://github.com/shiyer7474)).
|
||||
* Fix crash on `percent_rank`. `percent_rank`'s default frame type is changed to `range unbounded preceding and unbounded following`. `IWindowFunction`'s default window frame is considered, and window functions without a window frame definition in SQL can now be put into different `WindowTransformer`s properly. [#67661](https://github.com/ClickHouse/ClickHouse/pull/67661) ([lgbo](https://github.com/lgbo-ustc)).
|
||||
* Fix reloading SQL UDFs with UNION. Previously, restarting the server could make UDF invalid. [#67665](https://github.com/ClickHouse/ClickHouse/pull/67665) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||
* Fix possible logical error "Unexpected return type from if" with the experimental Variant type and the enabled setting `use_variant_as_common_type` in function `if` with Tuples and Maps. [#67687](https://github.com/ClickHouse/ClickHouse/pull/67687) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Due to a bug in the Linux kernel, a query can hang in `TimerDescriptor::drain`. This closes [#37686](https://github.com/ClickHouse/ClickHouse/issues/37686). [#67702](https://github.com/ClickHouse/ClickHouse/pull/67702) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||
* Fix completion of `RESTORE ON CLUSTER` command. [#67720](https://github.com/ClickHouse/ClickHouse/pull/67720) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||
* Fix dictionary hang in case of CANNOT_SCHEDULE_TASK while loading. [#67751](https://github.com/ClickHouse/ClickHouse/pull/67751) ([Azat Khuzhin](https://github.com/azat)).
|
||||
* Queries like `SELECT count() FROM t WHERE cast(c = 1 or c = 9999 AS Bool) SETTINGS use_skip_indexes=1` with bloom filter indexes on `c` now work correctly. [#67781](https://github.com/ClickHouse/ClickHouse/pull/67781) ([jsc0218](https://github.com/jsc0218)).
|
||||
* Fix wrong aggregation result in some queries with aggregation without keys and filter, close [#67419](https://github.com/ClickHouse/ClickHouse/issues/67419). [#67804](https://github.com/ClickHouse/ClickHouse/pull/67804) ([vdimir](https://github.com/vdimir)).
|
||||
* Validate experimental/suspicious data types in ALTER ADD/MODIFY COLUMN. [#67911](https://github.com/ClickHouse/ClickHouse/pull/67911) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||
* Fix DateTime64 parsing after constant folding in distributed queries, close [#66773](https://github.com/ClickHouse/ClickHouse/issues/66773). [#67920](https://github.com/ClickHouse/ClickHouse/pull/67920) ([vdimir](https://github.com/vdimir)).
|
||||
* Fix wrong `count()` result when there is non-deterministic function in predicate. [#67922](https://github.com/ClickHouse/ClickHouse/pull/67922) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
|
||||
* Fixed the calculation of the maximum thread soft limit in containerized environments where the usable CPU count is limited. [#67963](https://github.com/ClickHouse/ClickHouse/pull/67963) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||
* Now ClickHouse doesn't consider part as broken if projection doesn't exist on disk but exists in `checksums.txt`. [#68003](https://github.com/ClickHouse/ClickHouse/pull/68003) ([alesapin](https://github.com/alesapin)).
|
||||
* Fixed skipping of untouched parts in mutations with the new analyzer. Previously, with the analyzer enabled, data in a part could be rewritten by a mutation even if the mutation does not affect that part according to the predicate. [#68052](https://github.com/ClickHouse/ClickHouse/pull/68052) ([Anton Popov](https://github.com/CurtizJ)).
|
||||
* Removes an incorrect optimization to remove sorting in subqueries that use `OFFSET`. Fixes [#67906](https://github.com/ClickHouse/ClickHouse/issues/67906). [#68099](https://github.com/ClickHouse/ClickHouse/pull/68099) ([Graham Campbell](https://github.com/GrahamCampbell)).
|
||||
* Attempt to fix `Block structure mismatch in AggregatingStep stream: different types` for aggregate projection optimization. [#68107](https://github.com/ClickHouse/ClickHouse/pull/68107) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||
* Try to fix a PostgreSQL crash when a query is cancelled. [#68288](https://github.com/ClickHouse/ClickHouse/pull/68288) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||
* Fix missing sync replica mode in query `SYSTEM SYNC REPLICA`. [#68326](https://github.com/ClickHouse/ClickHouse/pull/68326) ([Duc Canh Le](https://github.com/canhld94)).
### <a id="247"></a> ClickHouse release 24.7, 2024-07-30
#### Backward Incompatible Change
31
CITATION.cff
Normal file
@ -0,0 +1,31 @@
|
||||
# This CITATION.cff file was generated with cffinit.
|
||||
|
||||
cff-version: 1.2.0
|
||||
title: "ClickHouse"
|
||||
message: "If you use this software, please cite it as below."
|
||||
type: software
|
||||
authors:
|
||||
- family-names: "Milovidov"
|
||||
given-names: "Alexey"
|
||||
repository-code: 'https://github.com/ClickHouse/ClickHouse'
|
||||
url: 'https://clickhouse.com'
|
||||
license: Apache-2.0
|
||||
preferred-citation:
|
||||
type: article
|
||||
authors:
|
||||
- family-names: "Schulze"
|
||||
given-names: "Robert"
|
||||
- family-names: "Schreiber"
|
||||
given-names: "Tom"
|
||||
- family-names: "Yatsishin"
|
||||
given-names: "Ilya"
|
||||
- family-names: "Dahimene"
|
||||
given-names: "Ryadh"
|
||||
- family-names: "Milovidov"
|
||||
given-names: "Alexey"
|
||||
journal: "Proceedings of the VLDB Endowment"
|
||||
title: "ClickHouse - Lightning Fast Analytics for Everyone"
|
||||
year: 2024
|
||||
volume: 17
|
||||
issue: 12
|
||||
doi: 10.14778/3685800.3685802
|
@ -187,14 +187,6 @@ else ()
|
||||
set(NO_WHOLE_ARCHIVE --no-whole-archive)
|
||||
endif ()
|
||||
|
||||
if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
|
||||
# Can be lld or ld-lld or lld-13 or /path/to/lld.
|
||||
if (LINKER_NAME MATCHES "lld")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gdb-index")
|
||||
message (STATUS "Adding .gdb-index via --gdb-index linker option.")
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
if (NOT (SANITIZE_COVERAGE OR WITH_COVERAGE)
|
||||
AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE"
|
||||
OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO"
|
||||
@ -330,17 +322,21 @@ if (DISABLE_OMIT_FRAME_POINTER)
|
||||
set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
|
||||
endif()
|
||||
|
||||
# Before you start hating your debugger because it refuses to show variables ('<optimized out>'), try building with -DDEBUG_O_LEVEL="0"
|
||||
# https://stackoverflow.com/questions/63386189/whats-the-difference-between-a-compilers-o0-option-and-og-option/63386263#63386263
|
||||
set(DEBUG_O_LEVEL "g" CACHE STRING "The -Ox level used for debug builds")
|
||||
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O${DEBUG_O_LEVEL} ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
|
||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O${DEBUG_O_LEVEL} ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
|
||||
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O${DEBUG_O_LEVEL} ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
|
||||
if (OS_DARWIN)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
||||
@ -402,7 +398,7 @@ if ((NOT OS_LINUX AND NOT OS_ANDROID) OR (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
|
||||
set(ENABLE_GWP_ASAN OFF)
|
||||
endif ()
|
||||
|
||||
option (ENABLE_FIU "Enable Fiu" ON)
|
||||
option (ENABLE_LIBFIU "Enable libfiu" ON)
|
||||
|
||||
option(WERROR "Enable -Werror compiler option" ON)
|
||||
|
||||
@ -428,12 +424,17 @@ if (NOT SANITIZE)
|
||||
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
|
||||
endif()
|
||||
|
||||
if (OS_LINUX AND NOT (ARCH_AARCH64 OR ARCH_S390X) AND NOT SANITIZE)
|
||||
# Slightly more efficient code can be generated
|
||||
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
|
||||
if (NOT OS_ANDROID AND OS_LINUX AND NOT ARCH_S390X AND NOT SANITIZE)
|
||||
# Using '-no-pie' builds executables with fixed addresses, resulting in slightly more efficient code
|
||||
# and keeping binary addresses constant even with ASLR enabled.
|
||||
# Disabled on Android as it requires PIE: https://source.android.com/docs/security/enhancements#android-5
|
||||
# Disabled on IBM S390X due to build issues with 'no-pie'
|
||||
# Disabled with sanitizers to avoid issues with maximum relocation size: https://github.com/ClickHouse/ClickHouse/pull/49145
|
||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
|
||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
|
||||
else ()
|
||||
message (WARNING "ClickHouse is built as PIE, system.trace_log will contain invalid addresses after server restart.")
|
||||
endif ()
|
||||
|
||||
if (ENABLE_TESTS)
|
||||
@ -604,7 +605,9 @@ if (NATIVE_BUILD_TARGETS
|
||||
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${NATIVE_BUILD_DIR}"
|
||||
COMMAND_ECHO STDOUT)
|
||||
COMMAND_ECHO STDOUT
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
@ -616,9 +619,13 @@ if (NATIVE_BUILD_TARGETS
|
||||
"-DENABLE_CLICKHOUSE_SELF_EXTRACTING=${ENABLE_CLICKHOUSE_SELF_EXTRACTING}"
|
||||
${PROJECT_SOURCE_DIR}
|
||||
WORKING_DIRECTORY "${NATIVE_BUILD_DIR}"
|
||||
COMMAND_ECHO STDOUT)
|
||||
COMMAND_ECHO STDOUT
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_COMMAND} --build "${NATIVE_BUILD_DIR}" --target ${NATIVE_BUILD_TARGETS}
|
||||
COMMAND_ECHO STDOUT)
|
||||
COMMAND_ECHO STDOUT
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
endif ()
|
||||
|
@ -51,8 +51,14 @@ if (NOT "$ENV{CFLAGS}" STREQUAL ""
|
||||
endif()
|
||||
|
||||
# Default toolchain - this is needed to avoid dependency on OS files.
|
||||
execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
|
||||
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)
|
||||
execute_process(COMMAND uname -s
|
||||
OUTPUT_VARIABLE OS
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
execute_process(COMMAND uname -m
|
||||
OUTPUT_VARIABLE ARCH
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
|
||||
# By default, prefer clang on Linux
|
||||
# But note, that you still may change the compiler with -DCMAKE_C_COMPILER/-DCMAKE_CXX_COMPILER.
|
||||
|
32
README.md
@ -34,17 +34,43 @@ curl https://clickhouse.com/ | sh
|
||||
|
||||
Every month we get together with the community (users, contributors, customers, those interested in learning more about ClickHouse) to discuss what is coming in the latest release. If you are interested in sharing what you've built on ClickHouse, let us know.
|
||||
|
||||
* [v24.8 Community Call](https://clickhouse.com/company/events/v24-8-community-release-call) - August 29
|
||||
* [v24.9 Community Call](https://clickhouse.com/company/events/v24-9-community-release-call) - September 26
|
||||
|
||||
## Upcoming Events
|
||||
|
||||
Keep an eye out for upcoming meetups and events around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com. You can also peruse [ClickHouse Events](https://clickhouse.com/company/news-events) for a list of all upcoming trainings, meetups, speaking engagements, etc.
|
||||
|
||||
* MORE COMING SOON!
|
||||
Upcoming meetups
|
||||
|
||||
* [Jakarta Meetup](https://www.meetup.com/clickhouse-indonesia-user-group/events/303191359/) - October 1
|
||||
* [Singapore Meetup](https://www.meetup.com/clickhouse-singapore-meetup-group/events/303212064/) - October 3
|
||||
* [Madrid Meetup](https://www.meetup.com/clickhouse-spain-user-group/events/303096564/) - October 22
|
||||
* [Barcelona Meetup](https://www.meetup.com/clickhouse-spain-user-group/events/303096876/) - October 29
|
||||
* [Oslo Meetup](https://www.meetup.com/open-source-real-time-data-warehouse-real-time-analytics/events/302938622) - October 31
|
||||
* [Ghent Meetup](https://www.meetup.com/clickhouse-belgium-user-group/events/303049405/) - November 19
|
||||
* [Dubai Meetup](https://www.meetup.com/clickhouse-dubai-meetup-group/events/303096989/) - November 21
|
||||
* [Paris Meetup](https://www.meetup.com/clickhouse-france-user-group/events/303096434) - November 26
|
||||
|
||||
Recently completed meetups
|
||||
|
||||
* [ClickHouse Guangzhou User Group Meetup](https://mp.weixin.qq.com/s/GSvo-7xUoVzCsuUvlLTpCw) - August 25
|
||||
* [Seattle Meetup (Statsig)](https://www.meetup.com/clickhouse-seattle-user-group/events/302518075/) - August 27
|
||||
* [Melbourne Meetup](https://www.meetup.com/clickhouse-australia-user-group/events/302732666/) - August 27
|
||||
* [Sydney Meetup](https://www.meetup.com/clickhouse-australia-user-group/events/302862966/) - September 5
|
||||
* [Zurich Meetup](https://www.meetup.com/clickhouse-switzerland-meetup-group/events/302267429/) - September 5
|
||||
* [San Francisco Meetup (Cloudflare)](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/302540575) - September 5
|
||||
* [Raleigh Meetup (Deutsche Bank)](https://www.meetup.com/triangletechtalks/events/302723486/) - September 9
|
||||
* [New York Meetup (Rokt)](https://www.meetup.com/clickhouse-new-york-user-group/events/302575342) - September 10
|
||||
* [Toronto Meetup (Shopify)](https://www.meetup.com/clickhouse-toronto-user-group/events/301490855/) - September 10
|
||||
* [Chicago Meetup (Jump Capital)](https://lu.ma/43tvmrfw) - September 12
|
||||
* [London Meetup](https://www.meetup.com/clickhouse-london-user-group/events/302977267) - September 17
|
||||
* [Austin Meetup](https://www.meetup.com/clickhouse-austin-user-group/events/302558689/) - September 17
|
||||
* [Bangalore Meetup](https://www.meetup.com/clickhouse-bangalore-user-group/events/303208274/) - September 18
|
||||
* [Tel Aviv Meetup](https://www.meetup.com/clickhouse-meetup-israel/events/303095121) - September 22
|
||||
|
||||
## Recent Recordings
|
||||
* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
|
||||
* **Recording available**: [**v24.4 Release Call**](https://www.youtube.com/watch?v=dtUqgcfOGmE) All the features of 24.4, one convenient video! Watch it now!
|
||||
* **Recording available**: [**v24.8 LTS Release Call**](https://www.youtube.com/watch?v=AeLmp2jc51k) All the features of 24.8 LTS, one convenient video! Watch it now!
|
||||
|
||||
## Interested in joining ClickHouse and making it your full-time job?
|
||||
|
||||
|
16
SECURITY.md
@ -14,25 +14,15 @@ The following versions of ClickHouse server are currently supported with securit
|
||||
|
||||
| Version | Supported |
|
||||
|:-|:-|
|
||||
| 24.8 | ✔️ |
|
||||
| 24.7 | ✔️ |
|
||||
| 24.6 | ✔️ |
|
||||
| 24.5 | ✔️ |
|
||||
| 24.5 | ❌ |
|
||||
| 24.4 | ❌ |
|
||||
| 24.3 | ✔️ |
|
||||
| 24.2 | ❌ |
|
||||
| 24.1 | ❌ |
|
||||
| 23.12 | ❌ |
|
||||
| 23.11 | ❌ |
|
||||
| 23.10 | ❌ |
|
||||
| 23.9 | ❌ |
|
||||
| 23.8 | ✔️ |
|
||||
| 23.7 | ❌ |
|
||||
| 23.6 | ❌ |
|
||||
| 23.5 | ❌ |
|
||||
| 23.4 | ❌ |
|
||||
| 23.3 | ❌ |
|
||||
| 23.2 | ❌ |
|
||||
| 23.1 | ❌ |
|
||||
| 23.* | ❌ |
|
||||
| 22.* | ❌ |
|
||||
| 21.* | ❌ |
|
||||
| 20.* | ❌ |
|
||||
|
@ -1,4 +1,4 @@
|
||||
add_compile_options($<$<OR:$<COMPILE_LANGUAGE:C>,$<COMPILE_LANGUAGE:CXX>>:${COVERAGE_FLAGS}>)
|
||||
add_compile_options("$<$<OR:$<COMPILE_LANGUAGE:C>,$<COMPILE_LANGUAGE:CXX>>:${COVERAGE_FLAGS}>")
|
||||
|
||||
if (USE_CLANG_TIDY)
|
||||
set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}")
|
||||
@ -8,6 +8,8 @@ endif ()
|
||||
# when instantiated from JSON.cpp. Try again when libcxx(abi) and Clang are upgraded to 16.
|
||||
set (CMAKE_CXX_STANDARD 20)
|
||||
|
||||
configure_file(GitHash.cpp.in GitHash.generated.cpp)
|
||||
|
||||
set (SRCS
|
||||
argsToConfig.cpp
|
||||
cgroupsv2.cpp
|
||||
@ -33,6 +35,7 @@ set (SRCS
|
||||
safeExit.cpp
|
||||
throwError.cpp
|
||||
Numa.cpp
|
||||
GitHash.generated.cpp
|
||||
)
|
||||
|
||||
add_library (common ${SRCS})
|
||||
|
@ -27,27 +27,6 @@ bool cgroupsV2Enabled()
|
||||
#endif
|
||||
}
|
||||
|
||||
bool cgroupsV2MemoryControllerEnabled()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
chassert(cgroupsV2Enabled());
|
||||
/// According to https://docs.kernel.org/admin-guide/cgroup-v2.html, file "cgroup.controllers" defines which controllers are available
|
||||
/// for the current + child cgroups. The set of available controllers can be restricted from level to level using file
|
||||
/// "cgroups.subtree_control". It is therefore sufficient to check the bottom-most nested "cgroup.controllers" file.
|
||||
fs::path cgroup_dir = cgroupV2PathOfProcess();
|
||||
if (cgroup_dir.empty())
|
||||
return false;
|
||||
std::ifstream controllers_file(cgroup_dir / "cgroup.controllers");
|
||||
if (!controllers_file.is_open())
|
||||
return false;
|
||||
std::string controllers;
|
||||
std::getline(controllers_file, controllers);
|
||||
return controllers.find("memory") != std::string::npos;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
fs::path cgroupV2PathOfProcess()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
@ -71,3 +50,28 @@ fs::path cgroupV2PathOfProcess()
|
||||
return {};
|
||||
#endif
|
||||
}
|
||||
|
||||
std::optional<std::string> getCgroupsV2PathContainingFile([[maybe_unused]] std::string_view file_name)
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
if (!cgroupsV2Enabled())
|
||||
return {};
|
||||
|
||||
fs::path current_cgroup = cgroupV2PathOfProcess();
|
||||
if (current_cgroup.empty())
|
||||
return {};
|
||||
|
||||
/// Return the bottom-most nested file. If there is no such file at the current
|
||||
/// level, try again at the parent level as settings are inherited.
|
||||
while (current_cgroup != default_cgroups_mount.parent_path())
|
||||
{
|
||||
const auto path = current_cgroup / file_name;
|
||||
if (fs::exists(path))
|
||||
return {current_cgroup};
|
||||
current_cgroup = current_cgroup.parent_path();
|
||||
}
|
||||
return {};
|
||||
#else
|
||||
return {};
|
||||
#endif
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <filesystem>
|
||||
#include <string_view>
|
||||
|
||||
#if defined(OS_LINUX)
|
||||
/// I think it is possible to mount the cgroups hierarchy somewhere else (e.g. when in containers).
|
||||
@ -11,11 +12,11 @@ static inline const std::filesystem::path default_cgroups_mount = "/sys/fs/cgrou
|
||||
/// Is cgroups v2 enabled on the system?
|
||||
bool cgroupsV2Enabled();
|
||||
|
||||
/// Is the memory controller of cgroups v2 enabled on the system?
|
||||
/// Assumes that cgroupsV2Enabled() is enabled.
|
||||
bool cgroupsV2MemoryControllerEnabled();
|
||||
|
||||
/// Detects which cgroup v2 the process belongs to and returns the filesystem path to the cgroup.
|
||||
/// Returns an empty path the cgroup cannot be determined.
|
||||
/// Assumes that cgroupsV2Enabled() is enabled.
|
||||
std::filesystem::path cgroupV2PathOfProcess();
|
||||
|
||||
/// Returns the most nested cgroup dir containing the specified file.
|
||||
/// If cgroups v2 is not enabled - returns an empty optional.
|
||||
std::optional<std::string> getCgroupsV2PathContainingFile([[maybe_unused]] std::string_view file_name);
|
||||
|
@ -19,9 +19,6 @@ std::optional<uint64_t> getCgroupsV2MemoryLimit()
|
||||
if (!cgroupsV2Enabled())
|
||||
return {};
|
||||
|
||||
if (!cgroupsV2MemoryControllerEnabled())
|
||||
return {};
|
||||
|
||||
std::filesystem::path current_cgroup = cgroupV2PathOfProcess();
|
||||
if (current_cgroup.empty())
|
||||
return {};
|
||||
|
@ -66,13 +66,11 @@ TRAP(gethostbyname)
|
||||
TRAP(gethostbyname2)
|
||||
TRAP(gethostent)
|
||||
TRAP(getlogin)
|
||||
TRAP(getmntent)
|
||||
TRAP(getnetbyaddr)
|
||||
TRAP(getnetbyname)
|
||||
TRAP(getnetent)
|
||||
TRAP(getnetgrent)
|
||||
TRAP(getnetgrent_r)
|
||||
TRAP(getopt)
|
||||
TRAP(getopt_long)
|
||||
TRAP(getopt_long_only)
|
||||
TRAP(getpass)
|
||||
@ -133,7 +131,6 @@ TRAP(nrand48)
|
||||
TRAP(__ppc_get_timebase_freq)
|
||||
TRAP(ptsname)
|
||||
TRAP(putchar_unlocked)
|
||||
TRAP(putenv)
|
||||
TRAP(pututline)
|
||||
TRAP(pututxline)
|
||||
TRAP(putwchar_unlocked)
|
||||
@ -148,7 +145,6 @@ TRAP(sethostent)
|
||||
TRAP(sethostid)
|
||||
TRAP(setkey)
|
||||
//TRAP(setlocale) // Used by replxx at startup
|
||||
TRAP(setlogmask)
|
||||
TRAP(setnetent)
|
||||
TRAP(setnetgrent)
|
||||
TRAP(setprotoent)
|
||||
@ -203,7 +199,6 @@ TRAP(lgammal)
|
||||
TRAP(nftw)
|
||||
TRAP(nl_langinfo)
|
||||
TRAP(putc_unlocked)
|
||||
TRAP(rand)
|
||||
/** In the current POSIX.1 specification (POSIX.1-2008), readdir() is not required to be thread-safe. However, in modern
|
||||
* implementations (including the glibc implementation), concurrent calls to readdir() that specify different directory streams
|
||||
* are thread-safe. In cases where multiple threads must read from the same directory stream, using readdir() with external
|
||||
@ -288,4 +283,14 @@ TRAP(tss_get)
|
||||
TRAP(tss_set)
|
||||
TRAP(tss_delete)
|
||||
|
||||
#ifndef USE_MUSL
|
||||
/// These produce duplicate symbol errors when statically linking with musl.
|
||||
/// Maybe we can remove them from the musl fork.
|
||||
TRAP(getopt)
|
||||
TRAP(putenv)
|
||||
TRAP(setlogmask)
|
||||
TRAP(rand)
|
||||
TRAP(getmntent)
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
@ -3,7 +3,11 @@ add_subdirectory (Data)
|
||||
add_subdirectory (Data/ODBC)
|
||||
add_subdirectory (Foundation)
|
||||
add_subdirectory (JSON)
|
||||
add_subdirectory (MongoDB)
|
||||
|
||||
if (USE_MONGODB)
|
||||
add_subdirectory(MongoDB)
|
||||
endif()
|
||||
|
||||
add_subdirectory (Net)
|
||||
add_subdirectory (NetSSL_OpenSSL)
|
||||
add_subdirectory (Redis)
|
||||
|
@ -188,8 +188,9 @@ namespace Crypto
|
||||
pFile = fopen(keyFile.c_str(), "r");
|
||||
if (pFile)
|
||||
{
|
||||
pem_password_cb * pCB = pass.empty() ? (pem_password_cb *)0 : &passCB;
|
||||
void * pPassword = pass.empty() ? (void *)0 : (void *)pass.c_str();
|
||||
pem_password_cb * pCB = &passCB;
|
||||
static constexpr char * no_password = "";
|
||||
void * pPassword = pass.empty() ? (void *)no_password : (void *)pass.c_str();
|
||||
if (readFunc(pFile, &pKey, pCB, pPassword))
|
||||
{
|
||||
fclose(pFile);
|
||||
@ -225,6 +226,13 @@ namespace Crypto
|
||||
error:
|
||||
if (pFile)
|
||||
fclose(pFile);
|
||||
if (*ppKey)
|
||||
{
|
||||
if constexpr (std::is_same_v<K, EVP_PKEY>)
|
||||
EVP_PKEY_free(*ppKey);
|
||||
else
|
||||
EC_KEY_free(*ppKey);
|
||||
}
|
||||
throw OpenSSLException("EVPKey::loadKey(string)");
|
||||
}
|
||||
|
||||
@ -286,6 +294,13 @@ namespace Crypto
|
||||
error:
|
||||
if (pBIO)
|
||||
BIO_free(pBIO);
|
||||
if (*ppKey)
|
||||
{
|
||||
if constexpr (std::is_same_v<K, EVP_PKEY>)
|
||||
EVP_PKEY_free(*ppKey);
|
||||
else
|
||||
EC_KEY_free(*ppKey);
|
||||
}
|
||||
throw OpenSSLException("EVPKey::loadKey(stream)");
|
||||
}
|
||||
|
||||
|
@ -21,6 +21,7 @@
|
||||
#include "Poco/Exception.h"
|
||||
#include "Poco/Foundation.h"
|
||||
#include "Poco/Mutex.h"
|
||||
#include "Poco/Message.h"
|
||||
|
||||
|
||||
namespace Poco
|
||||
@ -78,6 +79,10 @@ public:
        ///
        /// The default implementation just breaks into the debugger.

    virtual void logMessageImpl(Message::Priority priority, const std::string & msg) {}
        /// Writes a message to the log.
        /// Useful for logging from Poco.

    static void handle(const Exception & exc);
        /// Invokes the currently registered ErrorHandler.

@ -87,6 +92,9 @@ public:
    static void handle();
        /// Invokes the currently registered ErrorHandler.

    static void logMessage(Message::Priority priority, const std::string & msg);
        /// Invokes the currently registered ErrorHandler to log a message.

    static ErrorHandler * set(ErrorHandler * pHandler);
        /// Registers the given handler as the current error handler.
        ///
@ -8,7 +8,7 @@
|
||||
// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
|
||||
// and Contributors.
|
||||
//
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
//
|
||||
|
||||
|
||||
@ -35,79 +35,91 @@ ErrorHandler::~ErrorHandler()
|
||||
|
||||
void ErrorHandler::exception(const Exception& exc)
|
||||
{
|
||||
poco_debugger_msg(exc.what());
|
||||
poco_debugger_msg(exc.what());
|
||||
}
|
||||
|
||||
|
||||
|
||||
void ErrorHandler::exception(const std::exception& exc)
|
||||
{
|
||||
poco_debugger_msg(exc.what());
|
||||
poco_debugger_msg(exc.what());
|
||||
}
|
||||
|
||||
|
||||
void ErrorHandler::exception()
|
||||
{
|
||||
poco_debugger_msg("unknown exception");
|
||||
poco_debugger_msg("unknown exception");
|
||||
}
|
||||
|
||||
|
||||
void ErrorHandler::handle(const Exception& exc)
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
void ErrorHandler::handle(const std::exception& exc)
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void ErrorHandler::handle()
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->exception();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
void ErrorHandler::logMessage(Message::Priority priority, const std::string & msg)
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
try
|
||||
{
|
||||
_pHandler->logMessageImpl(priority, msg);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
ErrorHandler* ErrorHandler::set(ErrorHandler* pHandler)
|
||||
{
|
||||
poco_check_ptr(pHandler);
|
||||
poco_check_ptr(pHandler);
|
||||
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
ErrorHandler* pOld = _pHandler;
|
||||
_pHandler = pHandler;
|
||||
return pOld;
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
ErrorHandler* pOld = _pHandler;
|
||||
_pHandler = pHandler;
|
||||
return pOld;
|
||||
}
|
||||
|
||||
|
||||
ErrorHandler* ErrorHandler::defaultHandler()
|
||||
{
|
||||
// NOTE: Since this is called to initialize the static _pHandler
|
||||
// variable, sh has to be a local static, otherwise we run
|
||||
// into static initialization order issues.
|
||||
static SingletonHolder<ErrorHandler> sh;
|
||||
return sh.get();
|
||||
// NOTE: Since this is called to initialize the static _pHandler
|
||||
// variable, sh has to be a local static, otherwise we run
|
||||
// into static initialization order issues.
|
||||
static SingletonHolder<ErrorHandler> sh;
|
||||
return sh.get();
|
||||
}
|
||||
|
||||
|
||||
|
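To illustrate the new logging path, here is a hedged sketch (not in the patch) of a handler that overrides logMessageImpl(); the PrintingErrorHandler name and the std::cerr sink are invented for the example:

    #include <Poco/ErrorHandler.h>
    #include <Poco/Message.h>
    #include <iostream>

    /// Hypothetical handler that forwards Poco's internal messages to stderr.
    class PrintingErrorHandler : public Poco::ErrorHandler
    {
    public:
        void logMessageImpl(Poco::Message::Priority priority, const std::string & msg) override
        {
            std::cerr << "[prio " << static_cast<int>(priority) << "] " << msg << "\n";
        }
    };

    int main()
    {
        static PrintingErrorHandler handler;
        Poco::ErrorHandler * previous = Poco::ErrorHandler::set(&handler);

        // Anything that calls the new static ErrorHandler::logMessage() now reaches logMessageImpl().
        Poco::ErrorHandler::logMessage(Poco::Message::PRIO_WARNING, "example message");

        Poco::ErrorHandler::set(previous); // restore the previously registered handler
    }
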
@ -48,25 +48,17 @@ std::string PathImpl::currentImpl()
|
||||
std::string PathImpl::homeImpl()
|
||||
{
|
||||
std::string path;
|
||||
#if defined(_POSIX_C_SOURCE) || defined(_BSD_SOURCE) || defined(_POSIX_C_SOURCE)
|
||||
size_t buf_size = 1024; // Same as glibc use for getpwuid
|
||||
std::vector<char> buf(buf_size);
|
||||
struct passwd res;
|
||||
struct passwd* pwd = nullptr;
|
||||
|
||||
getpwuid_r(getuid(), &res, buf.data(), buf_size, &pwd);
|
||||
#else
|
||||
struct passwd* pwd = getpwuid(getuid());
|
||||
#endif
|
||||
if (pwd)
|
||||
path = pwd->pw_dir;
|
||||
else
|
||||
{
|
||||
#if defined(_POSIX_C_SOURCE) || defined(_BSD_SOURCE) || defined(_POSIX_C_SOURCE)
|
||||
getpwuid_r(getuid(), &res, buf.data(), buf_size, &pwd);
|
||||
#else
|
||||
pwd = getpwuid(geteuid());
|
||||
#endif
|
||||
if (pwd)
|
||||
path = pwd->pw_dir;
|
||||
else
|
||||
@ -82,7 +74,7 @@ std::string PathImpl::configHomeImpl()
|
||||
{
|
||||
std::string path = PathImpl::homeImpl();
|
||||
std::string::size_type n = path.size();
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
#if POCO_OS == POCO_OS_MAC_OS_X
|
||||
path.append("Library/Preferences/");
|
||||
#else
|
||||
@ -97,7 +89,7 @@ std::string PathImpl::dataHomeImpl()
|
||||
{
|
||||
std::string path = PathImpl::homeImpl();
|
||||
std::string::size_type n = path.size();
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
#if POCO_OS == POCO_OS_MAC_OS_X
|
||||
path.append("Library/Application Support/");
|
||||
#else
|
||||
@ -112,7 +104,7 @@ std::string PathImpl::cacheHomeImpl()
|
||||
{
|
||||
std::string path = PathImpl::homeImpl();
|
||||
std::string::size_type n = path.size();
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
#if POCO_OS == POCO_OS_MAC_OS_X
|
||||
path.append("Library/Caches/");
|
||||
#else
|
||||
@ -127,7 +119,7 @@ std::string PathImpl::tempHomeImpl()
|
||||
{
|
||||
std::string path = PathImpl::homeImpl();
|
||||
std::string::size_type n = path.size();
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
if (n > 0 && path[n - 1] == '/')
|
||||
#if POCO_OS == POCO_OS_MAC_OS_X
|
||||
path.append("Library/Caches/");
|
||||
#else
|
||||
@ -159,7 +151,7 @@ std::string PathImpl::tempImpl()
|
||||
std::string PathImpl::configImpl()
|
||||
{
|
||||
std::string path;
|
||||
|
||||
|
||||
#if POCO_OS == POCO_OS_MAC_OS_X
|
||||
path = "/Library/Preferences/";
|
||||
#else
|
||||
|
@ -18,7 +18,9 @@
|
||||
#define Net_HTTPResponse_INCLUDED
|
||||
|
||||
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
#include "Poco/Net/HTTPCookie.h"
|
||||
#include "Poco/Net/HTTPMessage.h"
|
||||
#include "Poco/Net/Net.h"
|
||||
@ -180,6 +182,8 @@ namespace Net
|
||||
/// May throw an exception in case of a malformed
|
||||
/// Set-Cookie header.
|
||||
|
||||
void getHeaders(std::map<std::string, std::string> & headers) const;
|
||||
|
||||
void write(std::ostream & ostr) const;
|
||||
/// Writes the HTTP response to the given
|
||||
/// output stream.
|
||||
|
@ -58,6 +58,10 @@ namespace Net
|
||||
|
||||
void setKeepAliveTimeout(Poco::Timespan keepAliveTimeout);
|
||||
|
||||
size_t getKeepAliveTimeout() const { return _keepAliveTimeout.totalSeconds(); }
|
||||
|
||||
size_t getMaxKeepAliveRequests() const { return _maxKeepAliveRequests; }
|
||||
|
||||
private:
|
||||
bool _firstRequest;
|
||||
Poco::Timespan _keepAliveTimeout;
|
||||
|
@ -19,6 +19,8 @@
|
||||
|
||||
|
||||
#include <ios>
|
||||
#include <memory>
|
||||
#include <functional>
|
||||
#include "Poco/Any.h"
|
||||
#include "Poco/Buffer.h"
|
||||
#include "Poco/Exception.h"
|
||||
@ -33,6 +35,27 @@ namespace Net
{


class IHTTPSessionDataHooks
    /// Interface to control the stream of data bytes being sent or received through the socket by HTTPSession.
    /// It allows monitoring, throttling and scheduling data streams with syscall granularity.
{
public:
    virtual ~IHTTPSessionDataHooks() = default;

    virtual void atStart(int bytes) = 0;
        /// Called before sending/receiving data `bytes` to/from the socket.

    virtual void atFinish(int bytes) = 0;
        /// Called when sending/receiving of data `bytes` is successfully finished.

    virtual void atFail() = 0;
        /// If an error occurred during send/receive, atFail() is called instead of atFinish().
};


using HTTPSessionDataHooksPtr = std::shared_ptr<IHTTPSessionDataHooks>;


class Net_API HTTPSession
    /// HTTPSession implements basic HTTP session management
    /// for both HTTP clients and HTTP servers.
@ -73,6 +96,12 @@ namespace Net
    Poco::Timespan getReceiveTimeout() const;
        /// Returns the receive timeout for the HTTP session.

    void setSendDataHooks(const HTTPSessionDataHooksPtr & sendDataHooks = {});
        /// Sets data hooks that will be called on every send to the socket.

    void setReceiveDataHooks(const HTTPSessionDataHooksPtr & receiveDataHooks = {});
        /// Sets data hooks that will be called on every receive from the socket.

    bool connected() const;
        /// Returns true if the underlying socket is connected.

@ -211,6 +240,10 @@ namespace Net
    Poco::Exception * _pException;
    Poco::Any _data;

    // Data hooks
    HTTPSessionDataHooksPtr _sendDataHooks;
    HTTPSessionDataHooksPtr _receiveDataHooks;

    friend class HTTPStreamBuf;
    friend class HTTPHeaderStreamBuf;
    friend class HTTPFixedLengthStreamBuf;
@ -246,6 +279,16 @@ namespace Net
    return _receiveTimeout;
}

inline void HTTPSession::setSendDataHooks(const HTTPSessionDataHooksPtr & sendDataHooks)
{
    _sendDataHooks = sendDataHooks;
}

inline void HTTPSession::setReceiveDataHooks(const HTTPSessionDataHooksPtr & receiveDataHooks)
{
    _receiveDataHooks = receiveDataHooks;
}

inline StreamSocket & HTTPSession::socket()
{
    return _socket;
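As a usage illustration (not part of the patch), here is a hedged sketch of a hook implementation plugged into a client session; the CountingHooks class, the host name and the port are invented for this example:

    #include <Poco/Net/HTTPClientSession.h>
    #include <Poco/Net/HTTPSession.h>
    #include <atomic>
    #include <memory>

    /// Hypothetical hook that just counts the bytes flowing through the session.
    class CountingHooks : public Poco::Net::IHTTPSessionDataHooks
    {
    public:
        void atStart(int bytes) override { requested += bytes; }
        void atFinish(int bytes) override { transferred += bytes; }
        void atFail() override { ++failures; }

        std::atomic<int> requested{0};
        std::atomic<int> transferred{0};
        std::atomic<int> failures{0};
    };

    int main()
    {
        Poco::Net::HTTPClientSession session("example.com", 80);
        auto hooks = std::make_shared<CountingHooks>();
        session.setSendDataHooks(hooks);    // wraps every sendBytes() on the socket
        session.setReceiveDataHooks(hooks); // wraps every receiveBytes() on the socket
        // ... issue requests; hooks->transferred then reflects the observed traffic.
    }
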
@ -209,6 +209,15 @@ void HTTPResponse::getCookies(std::vector<HTTPCookie>& cookies) const
    }
}

void HTTPResponse::getHeaders(std::map<std::string, std::string> & headers) const
{
    headers.clear();
    for (const auto & it : *this)
    {
        headers.emplace(it.first, it.second);
    }
}


void HTTPResponse::write(std::ostream& ostr) const
{
@ -19,11 +19,11 @@ namespace Poco {
|
||||
namespace Net {
|
||||
|
||||
|
||||
HTTPServerSession::HTTPServerSession(const StreamSocket& socket, HTTPServerParams::Ptr pParams):
|
||||
HTTPSession(socket, pParams->getKeepAlive()),
|
||||
_firstRequest(true),
|
||||
_keepAliveTimeout(pParams->getKeepAliveTimeout()),
|
||||
_maxKeepAliveRequests(pParams->getMaxKeepAliveRequests())
|
||||
HTTPServerSession::HTTPServerSession(const StreamSocket & socket, HTTPServerParams::Ptr pParams)
|
||||
: HTTPSession(socket, pParams->getKeepAlive())
|
||||
, _firstRequest(true)
|
||||
, _keepAliveTimeout(pParams->getKeepAliveTimeout())
|
||||
, _maxKeepAliveRequests(pParams->getMaxKeepAliveRequests())
|
||||
{
|
||||
setTimeout(pParams->getTimeout());
|
||||
}
|
||||
@ -52,11 +52,12 @@ bool HTTPServerSession::hasMoreRequests()
|
||||
}
|
||||
else if (_maxKeepAliveRequests != 0 && getKeepAlive())
|
||||
{
|
||||
if (_maxKeepAliveRequests > 0)
|
||||
--_maxKeepAliveRequests;
|
||||
return buffered() > 0 || socket().poll(_keepAliveTimeout, Socket::SELECT_READ);
|
||||
}
|
||||
else return false;
|
||||
if (_maxKeepAliveRequests > 0)
|
||||
--_maxKeepAliveRequests;
|
||||
return buffered() > 0 || socket().poll(_keepAliveTimeout, Socket::SELECT_READ);
|
||||
}
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
|
@ -128,14 +128,14 @@ int HTTPSession::get()
|
||||
{
|
||||
if (_pCurrent == _pEnd)
|
||||
refill();
|
||||
|
||||
|
||||
if (_pCurrent < _pEnd)
|
||||
return *_pCurrent++;
|
||||
else
|
||||
return std::char_traits<char>::eof();
|
||||
}
|
||||
|
||||
|
||||
|
||||
int HTTPSession::peek()
|
||||
{
|
||||
if (_pCurrent == _pEnd)
|
||||
@ -147,7 +147,7 @@ int HTTPSession::peek()
|
||||
return std::char_traits<char>::eof();
|
||||
}
|
||||
|
||||
|
||||
|
||||
int HTTPSession::read(char* buffer, std::streamsize length)
|
||||
{
|
||||
if (_pCurrent < _pEnd)
|
||||
@ -166,10 +166,17 @@ int HTTPSession::write(const char* buffer, std::streamsize length)
|
||||
{
|
||||
try
|
||||
{
|
||||
return _socket.sendBytes(buffer, (int) length);
|
||||
if (_sendDataHooks)
|
||||
_sendDataHooks->atStart((int) length);
|
||||
int result = _socket.sendBytes(buffer, (int) length);
|
||||
if (_sendDataHooks)
|
||||
_sendDataHooks->atFinish(result);
|
||||
return result;
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
if (_sendDataHooks)
|
||||
_sendDataHooks->atFail();
|
||||
setException(exc);
|
||||
throw;
|
||||
}
|
||||
@ -180,10 +187,17 @@ int HTTPSession::receive(char* buffer, int length)
|
||||
{
|
||||
try
|
||||
{
|
||||
return _socket.receiveBytes(buffer, length);
|
||||
if (_receiveDataHooks)
|
||||
_receiveDataHooks->atStart(length);
|
||||
int result = _socket.receiveBytes(buffer, length);
|
||||
if (_receiveDataHooks)
|
||||
_receiveDataHooks->atFinish(result);
|
||||
return result;
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
if (_receiveDataHooks)
|
||||
_receiveDataHooks->atFail();
|
||||
setException(exc);
|
||||
throw;
|
||||
}
|
||||
|
@ -17,6 +17,7 @@
|
||||
#include "Poco/Net/StreamSocketImpl.h"
|
||||
#include "Poco/NumberFormatter.h"
|
||||
#include "Poco/Timestamp.h"
|
||||
#include "Poco/ErrorHandler.h"
|
||||
#include <string.h> // FD_SET needs memset on some platforms, so we can't use <cstring>
|
||||
|
||||
|
||||
@ -62,7 +63,7 @@ bool checkIsBrokenTimeout()
|
||||
|
||||
SocketImpl::SocketImpl():
|
||||
_sockfd(POCO_INVALID_SOCKET),
|
||||
_blocking(true),
|
||||
_blocking(true),
|
||||
_isBrokenTimeout(checkIsBrokenTimeout())
|
||||
{
|
||||
}
|
||||
@ -81,7 +82,7 @@ SocketImpl::~SocketImpl()
|
||||
close();
|
||||
}
|
||||
|
||||
|
||||
|
||||
SocketImpl* SocketImpl::acceptConnection(SocketAddress& clientAddr)
|
||||
{
|
||||
if (_sockfd == POCO_INVALID_SOCKET) throw InvalidSocketException();
|
||||
@ -117,7 +118,7 @@ void SocketImpl::connect(const SocketAddress& address)
|
||||
rc = ::connect(_sockfd, address.addr(), address.length());
|
||||
}
|
||||
while (rc != 0 && lastError() == POCO_EINTR);
|
||||
if (rc != 0)
|
||||
if (rc != 0)
|
||||
{
|
||||
int err = lastError();
|
||||
error(err, address.toString());
|
||||
@ -204,7 +205,7 @@ void SocketImpl::bind6(const SocketAddress& address, bool reuseAddress, bool reu
|
||||
#if defined(POCO_HAVE_IPv6)
|
||||
if (address.family() != SocketAddress::IPv6)
|
||||
throw Poco::InvalidArgumentException("SocketAddress must be an IPv6 address");
|
||||
|
||||
|
||||
if (_sockfd == POCO_INVALID_SOCKET)
|
||||
{
|
||||
init(address.af());
|
||||
@ -225,11 +226,11 @@ void SocketImpl::bind6(const SocketAddress& address, bool reuseAddress, bool reu
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
|
||||
void SocketImpl::listen(int backlog)
|
||||
{
|
||||
if (_sockfd == POCO_INVALID_SOCKET) throw InvalidSocketException();
|
||||
|
||||
|
||||
int rc = ::listen(_sockfd, backlog);
|
||||
if (rc != 0) error();
|
||||
}
|
||||
@ -253,7 +254,7 @@ void SocketImpl::shutdownReceive()
|
||||
if (rc != 0) error();
|
||||
}
|
||||
|
||||
|
||||
|
||||
void SocketImpl::shutdownSend()
|
||||
{
|
||||
if (_sockfd == POCO_INVALID_SOCKET) throw InvalidSocketException();
|
||||
@ -262,7 +263,7 @@ void SocketImpl::shutdownSend()
|
||||
if (rc != 0) error();
|
||||
}
|
||||
|
||||
|
||||
|
||||
void SocketImpl::shutdown()
|
||||
{
|
||||
if (_sockfd == POCO_INVALID_SOCKET) throw InvalidSocketException();
|
||||
@ -317,7 +318,7 @@ int SocketImpl::receiveBytes(void* buffer, int length, int flags)
|
||||
throw TimeoutException();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int rc;
|
||||
do
|
||||
{
|
||||
@ -325,7 +326,7 @@ int SocketImpl::receiveBytes(void* buffer, int length, int flags)
|
||||
rc = ::recv(_sockfd, reinterpret_cast<char*>(buffer), length, flags);
|
||||
}
|
||||
while (blocking && rc < 0 && lastError() == POCO_EINTR);
|
||||
if (rc < 0)
|
||||
if (rc < 0)
|
||||
{
|
||||
int err = lastError();
|
||||
if ((err == POCO_EAGAIN || err == POCO_EWOULDBLOCK) && !blocking)
|
||||
@ -363,7 +364,7 @@ int SocketImpl::receiveFrom(void* buffer, int length, SocketAddress& address, in
|
||||
throw TimeoutException();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
sockaddr_storage abuffer;
|
||||
struct sockaddr* pSA = reinterpret_cast<struct sockaddr*>(&abuffer);
|
||||
poco_socklen_t saLen = sizeof(abuffer);
|
||||
@ -450,7 +451,7 @@ bool SocketImpl::pollImpl(Poco::Timespan& remainingTime, int mode)
|
||||
}
|
||||
while (rc < 0 && lastError() == POCO_EINTR);
|
||||
if (rc < 0) error();
|
||||
return rc > 0;
|
||||
return rc > 0;
|
||||
|
||||
#else
|
||||
|
||||
@ -493,7 +494,7 @@ bool SocketImpl::pollImpl(Poco::Timespan& remainingTime, int mode)
|
||||
}
|
||||
while (rc < 0 && errorCode == POCO_EINTR);
|
||||
if (rc < 0) error(errorCode);
|
||||
return rc > 0;
|
||||
return rc > 0;
|
||||
|
||||
#endif // POCO_HAVE_FD_POLL
|
||||
}
|
||||
@ -503,13 +504,13 @@ bool SocketImpl::poll(const Poco::Timespan& timeout, int mode)
|
||||
Poco::Timespan remainingTime(timeout);
|
||||
return pollImpl(remainingTime, mode);
|
||||
}
|
||||
|
||||
|
||||
void SocketImpl::setSendBufferSize(int size)
|
||||
{
|
||||
setOption(SOL_SOCKET, SO_SNDBUF, size);
|
||||
}
|
||||
|
||||
|
||||
|
||||
int SocketImpl::getSendBufferSize()
|
||||
{
|
||||
int result;
|
||||
@ -523,7 +524,7 @@ void SocketImpl::setReceiveBufferSize(int size)
|
||||
setOption(SOL_SOCKET, SO_RCVBUF, size);
|
||||
}
|
||||
|
||||
|
||||
|
||||
int SocketImpl::getReceiveBufferSize()
|
||||
{
|
||||
int result;
|
||||
@ -569,7 +570,7 @@ Poco::Timespan SocketImpl::getReceiveTimeout()
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
SocketAddress SocketImpl::address()
|
||||
{
|
||||
if (_sockfd == POCO_INVALID_SOCKET) throw InvalidSocketException();
|
||||
@ -580,7 +581,7 @@ SocketAddress SocketImpl::address()
|
||||
int rc = ::getsockname(_sockfd, pSA, &saLen);
|
||||
if (rc == 0)
|
||||
return SocketAddress(pSA, saLen);
|
||||
else
|
||||
else
|
||||
error();
|
||||
return SocketAddress();
|
||||
}
|
||||
|
@ -8,7 +8,7 @@
|
||||
// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
|
||||
// and Contributors.
|
||||
//
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
//
|
||||
|
||||
|
||||
@ -44,190 +44,194 @@ TCPServerConnectionFilter::~TCPServerConnectionFilter()
|
||||
|
||||
|
||||
TCPServer::TCPServer(TCPServerConnectionFactory::Ptr pFactory, Poco::UInt16 portNumber, TCPServerParams::Ptr pParams):
|
||||
_socket(ServerSocket(portNumber)),
|
||||
_thread(threadName(_socket)),
|
||||
_stopped(true)
|
||||
{
|
||||
Poco::ThreadPool& pool = Poco::ThreadPool::defaultPool();
|
||||
if (pParams)
|
||||
{
|
||||
int toAdd = pParams->getMaxThreads() - pool.capacity();
|
||||
if (toAdd > 0) pool.addCapacity(toAdd);
|
||||
}
|
||||
_pDispatcher = new TCPServerDispatcher(pFactory, pool, pParams);
|
||||
|
||||
_socket(ServerSocket(portNumber)),
|
||||
_thread(threadName(_socket)),
|
||||
_stopped(true)
|
||||
{
|
||||
Poco::ThreadPool& pool = Poco::ThreadPool::defaultPool();
|
||||
if (pParams)
|
||||
{
|
||||
int toAdd = pParams->getMaxThreads() - pool.capacity();
|
||||
if (toAdd > 0) pool.addCapacity(toAdd);
|
||||
}
|
||||
_pDispatcher = new TCPServerDispatcher(pFactory, pool, pParams);
|
||||
|
||||
}
|
||||
|
||||
|
||||
TCPServer::TCPServer(TCPServerConnectionFactory::Ptr pFactory, const ServerSocket& socket, TCPServerParams::Ptr pParams):
|
||||
_socket(socket),
|
||||
_thread(threadName(socket)),
|
||||
_stopped(true)
|
||||
_socket(socket),
|
||||
_thread(threadName(socket)),
|
||||
_stopped(true)
|
||||
{
|
||||
Poco::ThreadPool& pool = Poco::ThreadPool::defaultPool();
|
||||
if (pParams)
|
||||
{
|
||||
int toAdd = pParams->getMaxThreads() - pool.capacity();
|
||||
if (toAdd > 0) pool.addCapacity(toAdd);
|
||||
}
|
||||
_pDispatcher = new TCPServerDispatcher(pFactory, pool, pParams);
|
||||
Poco::ThreadPool& pool = Poco::ThreadPool::defaultPool();
|
||||
if (pParams)
|
||||
{
|
||||
int toAdd = pParams->getMaxThreads() - pool.capacity();
|
||||
if (toAdd > 0) pool.addCapacity(toAdd);
|
||||
}
|
||||
_pDispatcher = new TCPServerDispatcher(pFactory, pool, pParams);
|
||||
}
|
||||
|
||||
|
||||
TCPServer::TCPServer(TCPServerConnectionFactory::Ptr pFactory, Poco::ThreadPool& threadPool, const ServerSocket& socket, TCPServerParams::Ptr pParams):
|
||||
_socket(socket),
|
||||
_pDispatcher(new TCPServerDispatcher(pFactory, threadPool, pParams)),
|
||||
_thread(threadName(socket)),
|
||||
_stopped(true)
|
||||
_socket(socket),
|
||||
_pDispatcher(new TCPServerDispatcher(pFactory, threadPool, pParams)),
|
||||
_thread(threadName(socket)),
|
||||
_stopped(true)
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
TCPServer::~TCPServer()
|
||||
{
|
||||
try
|
||||
{
|
||||
stop();
|
||||
_pDispatcher->release();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
poco_unexpected();
|
||||
}
|
||||
try
|
||||
{
|
||||
stop();
|
||||
_pDispatcher->release();
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
poco_unexpected();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const TCPServerParams& TCPServer::params() const
|
||||
{
|
||||
return _pDispatcher->params();
|
||||
return _pDispatcher->params();
|
||||
}
|
||||
|
||||
|
||||
void TCPServer::start()
|
||||
{
|
||||
poco_assert (_stopped);
|
||||
poco_assert (_stopped);
|
||||
|
||||
_stopped = false;
|
||||
_thread.start(*this);
|
||||
_stopped = false;
|
||||
_thread.start(*this);
|
||||
}
|
||||
|
||||
|
||||
|
||||
void TCPServer::stop()
|
||||
{
|
||||
if (!_stopped)
|
||||
{
|
||||
_stopped = true;
|
||||
_thread.join();
|
||||
_pDispatcher->stop();
|
||||
}
|
||||
if (!_stopped)
|
||||
{
|
||||
_stopped = true;
|
||||
_thread.join();
|
||||
_pDispatcher->stop();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void TCPServer::run()
|
||||
{
|
||||
while (!_stopped)
|
||||
{
|
||||
Poco::Timespan timeout(250000);
|
||||
try
|
||||
{
|
||||
if (_socket.poll(timeout, Socket::SELECT_READ))
|
||||
{
|
||||
try
|
||||
{
|
||||
StreamSocket ss = _socket.acceptConnection();
|
||||
|
||||
if (!_pConnectionFilter || _pConnectionFilter->accept(ss))
|
||||
{
|
||||
// enable nodelay per default: OSX really needs that
|
||||
while (!_stopped)
|
||||
{
|
||||
Poco::Timespan timeout(250000);
|
||||
try
|
||||
{
|
||||
if (_socket.poll(timeout, Socket::SELECT_READ))
|
||||
{
|
||||
try
|
||||
{
|
||||
StreamSocket ss = _socket.acceptConnection();
|
||||
|
||||
if (!_pConnectionFilter || _pConnectionFilter->accept(ss))
|
||||
{
|
||||
// enable nodelay per default: OSX really needs that
|
||||
#if defined(POCO_OS_FAMILY_UNIX)
|
||||
if (ss.address().family() != AddressFamily::UNIX_LOCAL)
|
||||
if (ss.address().family() != AddressFamily::UNIX_LOCAL)
|
||||
#endif
|
||||
{
|
||||
ss.setNoDelay(true);
|
||||
}
|
||||
_pDispatcher->enqueue(ss);
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
}
|
||||
catch (std::exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
ErrorHandler::handle();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
// possibly a resource issue since poll() failed;
|
||||
// give some time to recover before trying again
|
||||
Poco::Thread::sleep(50);
|
||||
}
|
||||
}
|
||||
{
|
||||
ss.setNoDelay(true);
|
||||
}
|
||||
_pDispatcher->enqueue(ss);
|
||||
}
|
||||
else
|
||||
{
|
||||
ErrorHandler::logMessage(Message::PRIO_WARNING, "Filtered out connection from " + ss.peerAddress().toString());
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
}
|
||||
catch (std::exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
ErrorHandler::handle();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
ErrorHandler::handle(exc);
|
||||
// possibly a resource issue since poll() failed;
|
||||
// give some time to recover before trying again
|
||||
Poco::Thread::sleep(50);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int TCPServer::currentThreads() const
|
||||
{
|
||||
return _pDispatcher->currentThreads();
|
||||
return _pDispatcher->currentThreads();
|
||||
}
|
||||
|
||||
|
||||
int TCPServer::maxThreads() const
|
||||
{
|
||||
return _pDispatcher->maxThreads();
|
||||
return _pDispatcher->maxThreads();
|
||||
}
|
||||
|
||||
|
||||
|
||||
int TCPServer::totalConnections() const
|
||||
{
|
||||
return _pDispatcher->totalConnections();
|
||||
return _pDispatcher->totalConnections();
|
||||
}
|
||||
|
||||
|
||||
int TCPServer::currentConnections() const
|
||||
{
|
||||
return _pDispatcher->currentConnections();
|
||||
return _pDispatcher->currentConnections();
|
||||
}
|
||||
|
||||
|
||||
int TCPServer::maxConcurrentConnections() const
|
||||
{
|
||||
return _pDispatcher->maxConcurrentConnections();
|
||||
return _pDispatcher->maxConcurrentConnections();
|
||||
}
|
||||
|
||||
|
||||
|
||||
int TCPServer::queuedConnections() const
|
||||
{
|
||||
return _pDispatcher->queuedConnections();
|
||||
return _pDispatcher->queuedConnections();
|
||||
}
|
||||
|
||||
|
||||
int TCPServer::refusedConnections() const
|
||||
{
|
||||
return _pDispatcher->refusedConnections();
|
||||
return _pDispatcher->refusedConnections();
|
||||
}
|
||||
|
||||
|
||||
void TCPServer::setConnectionFilter(const TCPServerConnectionFilter::Ptr& pConnectionFilter)
|
||||
{
|
||||
poco_assert (_stopped);
|
||||
poco_assert (_stopped);
|
||||
|
||||
_pConnectionFilter = pConnectionFilter;
|
||||
_pConnectionFilter = pConnectionFilter;
|
||||
}
|
||||
|
||||
|
||||
std::string TCPServer::threadName(const ServerSocket& socket)
|
||||
{
|
||||
std::string name("TCPServer: ");
|
||||
name.append(socket.address().toString());
|
||||
return name;
|
||||
std::string name("TCPServer: ");
|
||||
name.append(socket.address().toString());
|
||||
return name;
|
||||
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,7 @@
|
||||
// Copyright (c) 2005-2007, Applied Informatics Software Engineering GmbH.
|
||||
// and Contributors.
|
||||
//
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
// SPDX-License-Identifier: BSL-1.0
|
||||
//
|
||||
|
||||
|
||||
@ -33,44 +33,44 @@ namespace Net {
|
||||
class TCPConnectionNotification: public Notification
|
||||
{
|
||||
public:
|
||||
TCPConnectionNotification(const StreamSocket& socket):
|
||||
_socket(socket)
|
||||
{
|
||||
}
|
||||
|
||||
~TCPConnectionNotification()
|
||||
{
|
||||
}
|
||||
|
||||
const StreamSocket& socket() const
|
||||
{
|
||||
return _socket;
|
||||
}
|
||||
TCPConnectionNotification(const StreamSocket& socket):
|
||||
_socket(socket)
|
||||
{
|
||||
}
|
||||
|
||||
~TCPConnectionNotification()
|
||||
{
|
||||
}
|
||||
|
||||
const StreamSocket& socket() const
|
||||
{
|
||||
return _socket;
|
||||
}
|
||||
|
||||
private:
|
||||
StreamSocket _socket;
|
||||
StreamSocket _socket;
|
||||
};
|
||||
|
||||
|
||||
TCPServerDispatcher::TCPServerDispatcher(TCPServerConnectionFactory::Ptr pFactory, Poco::ThreadPool& threadPool, TCPServerParams::Ptr pParams):
|
||||
_rc(1),
|
||||
_pParams(pParams),
|
||||
_currentThreads(0),
|
||||
_totalConnections(0),
|
||||
_currentConnections(0),
|
||||
_maxConcurrentConnections(0),
|
||||
_refusedConnections(0),
|
||||
_stopped(false),
|
||||
_pConnectionFactory(pFactory),
|
||||
_threadPool(threadPool)
|
||||
_rc(1),
|
||||
_pParams(pParams),
|
||||
_currentThreads(0),
|
||||
_totalConnections(0),
|
||||
_currentConnections(0),
|
||||
_maxConcurrentConnections(0),
|
||||
_refusedConnections(0),
|
||||
_stopped(false),
|
||||
_pConnectionFactory(pFactory),
|
||||
_threadPool(threadPool)
|
||||
{
|
||||
poco_check_ptr (pFactory);
|
||||
poco_check_ptr (pFactory);
|
||||
|
||||
if (!_pParams)
|
||||
_pParams = new TCPServerParams;
|
||||
|
||||
if (_pParams->getMaxThreads() == 0)
|
||||
_pParams->setMaxThreads(threadPool.capacity());
|
||||
if (!_pParams)
|
||||
_pParams = new TCPServerParams;
|
||||
|
||||
if (_pParams->getMaxThreads() == 0)
|
||||
_pParams->setMaxThreads(threadPool.capacity());
|
||||
}
|
||||
|
||||
|
||||
@ -81,161 +81,184 @@ TCPServerDispatcher::~TCPServerDispatcher()
|
||||
|
||||
void TCPServerDispatcher::duplicate()
|
||||
{
|
||||
++_rc;
|
||||
++_rc;
|
||||
}
|
||||
|
||||
|
||||
void TCPServerDispatcher::release()
|
||||
{
|
||||
if (--_rc == 0) delete this;
|
||||
if (--_rc == 0) delete this;
|
||||
}
|
||||
|
||||
|
||||
void TCPServerDispatcher::run()
|
||||
{
|
||||
AutoPtr<TCPServerDispatcher> guard(this); // ensure object stays alive
|
||||
AutoPtr<TCPServerDispatcher> guard(this); // ensure object stays alive
|
||||
|
||||
int idleTime = (int) _pParams->getThreadIdleTime().totalMilliseconds();
|
||||
int idleTime = (int) _pParams->getThreadIdleTime().totalMilliseconds();
|
||||
|
||||
for (;;)
|
||||
{
|
||||
try
|
||||
{
|
||||
AutoPtr<Notification> pNf = _queue.waitDequeueNotification(idleTime);
|
||||
if (pNf && !_stopped)
|
||||
{
|
||||
TCPConnectionNotification* pCNf = dynamic_cast<TCPConnectionNotification*>(pNf.get());
|
||||
if (pCNf)
|
||||
{
|
||||
beginConnection();
|
||||
if (!_stopped)
|
||||
{
|
||||
std::unique_ptr<TCPServerConnection> pConnection(_pConnectionFactory->createConnection(pCNf->socket()));
|
||||
poco_check_ptr(pConnection.get());
|
||||
pConnection->start();
|
||||
}
|
||||
/// endConnection() should be called after destroying TCPServerConnection,
|
||||
/// otherwise currentConnections() could become zero while some connections are yet still alive.
|
||||
endConnection();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception &exc) { ErrorHandler::handle(exc); }
|
||||
catch (std::exception &exc) { ErrorHandler::handle(exc); }
|
||||
catch (...) { ErrorHandler::handle(); }
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
if (_stopped || (_currentThreads > 1 && _queue.empty()))
|
||||
{
|
||||
--_currentThreads;
|
||||
break;
|
||||
}
|
||||
}
|
||||
for (;;)
|
||||
{
|
||||
try
|
||||
{
|
||||
AutoPtr<Notification> pNf = _queue.waitDequeueNotification(idleTime);
|
||||
if (pNf && !_stopped)
|
||||
{
|
||||
TCPConnectionNotification* pCNf = dynamic_cast<TCPConnectionNotification*>(pNf.get());
|
||||
if (pCNf)
|
||||
{
|
||||
beginConnection();
|
||||
if (!_stopped)
|
||||
{
|
||||
std::unique_ptr<TCPServerConnection> pConnection(_pConnectionFactory->createConnection(pCNf->socket()));
|
||||
poco_check_ptr(pConnection.get());
|
||||
pConnection->start();
|
||||
}
|
||||
/// endConnection() should be called after destroying TCPServerConnection,
|
||||
/// otherwise currentConnections() could become zero while some connections are yet still alive.
|
||||
endConnection();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Poco::Exception &exc) { ErrorHandler::handle(exc); }
|
||||
catch (std::exception &exc) { ErrorHandler::handle(exc); }
|
||||
catch (...) { ErrorHandler::handle(); }
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
if (_stopped || (_currentThreads > 1 && _queue.empty()))
|
||||
{
|
||||
--_currentThreads;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
namespace
|
||||
{
|
||||
static const std::string threadName("TCPServerConnection");
|
||||
static const std::string threadName("TCPServerConnection");
|
||||
}
|
||||
|
||||
|
||||
|
||||
void TCPServerDispatcher::enqueue(const StreamSocket& socket)
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
|
||||
if (_queue.size() < _pParams->getMaxQueued())
|
||||
{
|
||||
if (!_queue.hasIdleThreads() && _currentThreads < _pParams->getMaxThreads())
|
||||
{
|
||||
try
|
||||
{
|
||||
ErrorHandler::logMessage(Message::PRIO_TEST, "Queue size: " + std::to_string(_queue.size()) +
|
||||
", current threads: " + std::to_string(_currentThreads) +
|
||||
", threads in pool: " + std::to_string(_threadPool.allocated()) +
|
||||
", current connections: " + std::to_string(_currentConnections));
|
||||
|
||||
|
||||
if (_queue.size() < _pParams->getMaxQueued())
|
||||
{
|
||||
/// NOTE: the condition below is wrong.
|
||||
/// Since the thread pool is shared between multiple servers/TCPServerDispatchers,
|
||||
/// _currentThreads < _pParams->getMaxThreads() can be true even when the pool is actually saturated.
|
||||
/// As a result, the queue is useless and connections never wait in the queue.
|
||||
/// Instead, we (mistakenly) think that we can create a thread for this connection, but we fail to create it
|
||||
/// and the connection gets rejected.
|
||||
/// We could check _currentThreads < _threadPool.allocated() to make it work,
|
||||
/// but it's not clear if we want to make it work
|
||||
/// because it may be better to reject connection immediately if we don't have resources to handle it.
|
||||
if (!_queue.hasIdleThreads() && _currentThreads < _pParams->getMaxThreads())
|
||||
{
|
||||
try
|
||||
{
|
||||
this->duplicate();
|
||||
_threadPool.startWithPriority(_pParams->getThreadPriority(), *this, threadName);
|
||||
++_currentThreads;
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
_threadPool.startWithPriority(_pParams->getThreadPriority(), *this, threadName);
|
||||
++_currentThreads;
|
||||
}
|
||||
catch (Poco::Exception& exc)
|
||||
{
|
||||
ErrorHandler::logMessage(Message::PRIO_WARNING, "Got an exception while starting thread for connection from " +
|
||||
socket.peerAddress().toString());
|
||||
ErrorHandler::handle(exc);
|
||||
this->release();
|
||||
++_refusedConnections;
|
||||
std::cerr << "Got exception while starting thread for connection. Error code: "
|
||||
<< exc.code() << ", message: '" << exc.displayText() << "'" << std::endl;
|
||||
return;
|
||||
}
|
||||
}
|
||||
_queue.enqueueNotification(new TCPConnectionNotification(socket));
|
||||
}
|
||||
else
|
||||
{
|
||||
++_refusedConnections;
|
||||
}
|
||||
++_refusedConnections;
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if (!_queue.hasIdleThreads())
|
||||
{
|
||||
ErrorHandler::logMessage(Message::PRIO_TRACE, "Don't have idle threads, adding connection from " +
|
||||
socket.peerAddress().toString() + " to the queue, size: " + std::to_string(_queue.size()));
|
||||
}
|
||||
_queue.enqueueNotification(new TCPConnectionNotification(socket));
|
||||
}
|
||||
else
|
||||
{
|
||||
ErrorHandler::logMessage(Message::PRIO_WARNING, "Refusing connection from " + socket.peerAddress().toString() +
|
||||
", reached max queue size " + std::to_string(_pParams->getMaxQueued()));
|
||||
++_refusedConnections;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void TCPServerDispatcher::stop()
|
||||
{
|
||||
_stopped = true;
|
||||
_queue.clear();
|
||||
_queue.wakeUpAll();
|
||||
_stopped = true;
|
||||
_queue.clear();
|
||||
_queue.wakeUpAll();
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::currentThreads() const
|
||||
{
|
||||
return _currentThreads;
|
||||
return _currentThreads;
|
||||
}
|
||||
|
||||
int TCPServerDispatcher::maxThreads() const
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
|
||||
return _threadPool.capacity();
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
|
||||
return _threadPool.capacity();
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::totalConnections() const
|
||||
{
|
||||
return _totalConnections;
|
||||
return _totalConnections;
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::currentConnections() const
|
||||
{
|
||||
return _currentConnections;
|
||||
return _currentConnections;
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::maxConcurrentConnections() const
|
||||
{
|
||||
return _maxConcurrentConnections;
|
||||
return _maxConcurrentConnections;
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::queuedConnections() const
|
||||
{
|
||||
return _queue.size();
|
||||
return _queue.size();
|
||||
}
|
||||
|
||||
|
||||
int TCPServerDispatcher::refusedConnections() const
|
||||
{
|
||||
return _refusedConnections;
|
||||
return _refusedConnections;
|
||||
}
|
||||
|
||||
|
||||
void TCPServerDispatcher::beginConnection()
|
||||
{
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
FastMutex::ScopedLock lock(_mutex);
|
||||
|
||||
++_totalConnections;
|
||||
++_currentConnections;
|
||||
if (_currentConnections > _maxConcurrentConnections)
|
||||
_maxConcurrentConnections.store(_currentConnections);
|
||||
++_totalConnections;
|
||||
++_currentConnections;
|
||||
if (_currentConnections > _maxConcurrentConnections)
|
||||
_maxConcurrentConnections.store(_currentConnections);
|
||||
}
|
||||
|
||||
|
||||
void TCPServerDispatcher::endConnection()
|
||||
{
|
||||
--_currentConnections;
|
||||
--_currentConnections;
|
||||
}
|
||||
|
||||
|
||||
|
@ -248,6 +248,9 @@ namespace Net
|
||||
SSL_CTX * sslContext() const;
|
||||
/// Returns the underlying OpenSSL SSL Context object.
|
||||
|
||||
SSL_CTX * takeSslContext();
|
||||
/// Takes ownership of the underlying OpenSSL SSL Context object.
|
||||
|
||||
Usage usage() const;
|
||||
/// Returns whether the context is for use by a client or by a server
|
||||
/// and whether TLSv1 is required.
|
||||
@ -401,6 +404,13 @@ namespace Net
|
||||
return _pSSLContext;
|
||||
}
|
||||
|
||||
inline SSL_CTX * Context::takeSslContext()
|
||||
{
|
||||
auto * result = _pSSLContext;
|
||||
_pSSLContext = nullptr;
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
inline bool Context::extendedCertificateVerificationEnabled() const
|
||||
{
|
||||
|
@ -106,6 +106,11 @@ Context::Context(
|
||||
|
||||
Context::~Context()
|
||||
{
|
||||
if (_pSSLContext == nullptr)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
SSL_CTX_free(_pSSLContext);
|
||||
|
@ -311,6 +311,14 @@ int SecureSocketImpl::sendBytes(const void* buffer, int length, int flags)
|
||||
while (mustRetry(rc, remaining_time));
|
||||
if (rc <= 0)
|
||||
{
|
||||
// At this stage we still can have last not yet received SSL message containing SSL error
|
||||
// so make a read to force SSL to process possible SSL error
|
||||
if (SSL_get_error(_pSSL, rc) == SSL_ERROR_SYSCALL && SocketImpl::lastError() == POCO_ECONNRESET)
|
||||
{
|
||||
char c = 0;
|
||||
SSL_read(_pSSL, &c, 1);
|
||||
}
|
||||
|
||||
rc = handleError(rc);
|
||||
if (rc == 0) throw SSLConnectionUnexpectedlyClosedException();
|
||||
}
|
||||
|
@ -18,4 +18,4 @@ target_compile_options (_poco_util
|
||||
-Wno-zero-as-null-pointer-constant
|
||||
)
|
||||
target_include_directories (_poco_util SYSTEM PUBLIC "include")
|
||||
target_link_libraries (_poco_util PUBLIC Poco::JSON Poco::XML)
|
||||
target_link_libraries (_poco_util PUBLIC Poco::JSON Poco::XML Poco::Net)
|
||||
|
@ -241,6 +241,20 @@ namespace Util
|
||||
/// If the value contains references to other properties (${<property>}), these
|
||||
/// are expanded.
|
||||
|
||||
std::string getHost(const std::string & key) const;
|
||||
/// Returns the string value of the host property with the given name.
|
||||
/// Throws a NotFoundException if the key does not exist.
|
||||
/// Throws a SyntaxException if the property is not a valid host (IP address or domain).
|
||||
/// If the value contains references to other properties (${<property>}), these
|
||||
/// are expanded.
|
||||
|
||||
std::string getHost(const std::string & key, const std::string & defaultValue) const;
|
||||
/// If a property with the given key exists, returns the host property's string value,
|
||||
/// otherwise returns the given default value.
|
||||
/// Throws a SyntaxException if the property is not a valid host (IP address or domain).
|
||||
/// If the value contains references to other properties (${<property>}), these
|
||||
/// are expanded.
|
||||
|
||||
virtual void setString(const std::string & key, const std::string & value);
|
||||
/// Sets the property with the given key to the given value.
|
||||
/// An already existing value for the key is overwritten.
|
||||
@ -339,12 +353,35 @@ namespace Util
|
||||
static bool parseBool(const std::string & value);
|
||||
void setRawWithEvent(const std::string & key, std::string value);
|
||||
|
||||
static void checkHostValidity(const std::string & value);
|
||||
/// Throws a SyntaxException if the value is not a valid host (IP address or domain).
|
||||
|
||||
virtual ~AbstractConfiguration();
|
||||
|
||||
private:
|
||||
std::string internalExpand(const std::string & value) const;
|
||||
std::string uncheckedExpand(const std::string & value) const;
|
||||
|
||||
static bool isValidIPv4Address(const std::string & value);
|
||||
/// An IPv4 address is considered valid if it is "0.0.0.0" or one of those,
|
||||
/// defined by inet_aton() or inet_addr()
|
||||
|
||||
static bool isValidIPv6Address(const std::string & value);
|
||||
/// An IPv6 address is considered valid if it is "::" or one of those,
|
||||
/// defined by inet_pton() with AF_INET6 flag
|
||||
/// (in this case it may have scope id and may be surrounded by '[', ']')
|
||||
|
||||
static bool isValidDomainName(const std::string & value);
|
||||
/// <domain> ::= <subdomain> [ "." ]
|
||||
/// <subdomain> ::= <label> | <subdomain> "." <label>
|
||||
/// <label> ::= <letter> [ [ <ldh-str> ] <let-dig> ]
|
||||
/// <ldh-str> ::= <let-dig-hyp> | <let-dig-hyp> <ldh-str>
|
||||
/// <let-dig-hyp> ::= <let-dig> | "-"
|
||||
/// <let-dig> ::= <letter> | <digit>
|
||||
/// <letter> ::= any one of the 52 alphabetic characters A through Z in
|
||||
/// upper case and a through z in lower case
|
||||
/// <digit> ::= any one of the ten digits 0 through 9
|
||||
|
||||
AbstractConfiguration(const AbstractConfiguration &);
|
||||
AbstractConfiguration & operator=(const AbstractConfiguration &);
|
||||
|
||||
|
@ -18,6 +18,7 @@
|
||||
#include "Poco/NumberParser.h"
|
||||
#include "Poco/NumberFormatter.h"
|
||||
#include "Poco/String.h"
|
||||
#include "Poco/Net/IPAddressImpl.h"
|
||||
|
||||
|
||||
using Poco::Mutex;
|
||||
@ -263,6 +264,41 @@ bool AbstractConfiguration::getBool(const std::string& key, bool defaultValue) c
|
||||
}
|
||||
|
||||
|
||||
std::string AbstractConfiguration::getHost(const std::string& key) const
|
||||
{
|
||||
Mutex::ScopedLock lock(_mutex);
|
||||
|
||||
std::string value;
|
||||
if (getRaw(key, value))
|
||||
{
|
||||
std::string expandedValue = internalExpand(value);
|
||||
checkHostValidity(expandedValue);
|
||||
return expandedValue;
|
||||
}
|
||||
else
|
||||
throw NotFoundException(key);
|
||||
}
|
||||
|
||||
|
||||
std::string AbstractConfiguration::getHost(const std::string& key, const std::string& defaultValue) const
|
||||
{
|
||||
Mutex::ScopedLock lock(_mutex);
|
||||
|
||||
std::string value;
|
||||
if (getRaw(key, value))
|
||||
{
|
||||
std::string expandedValue = internalExpand(value);
|
||||
checkHostValidity(expandedValue);
|
||||
return expandedValue;
|
||||
}
|
||||
else
|
||||
{
|
||||
checkHostValidity(defaultValue);
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void AbstractConfiguration::setString(const std::string& key, const std::string& value)
|
||||
{
|
||||
setRawWithEvent(key, value);
|
||||
@ -529,4 +565,68 @@ void AbstractConfiguration::setRawWithEvent(const std::string& key, std::string
|
||||
}
|
||||
|
||||
|
||||
void AbstractConfiguration::checkHostValidity(const std::string& value)
|
||||
{
|
||||
if (!isValidIPv4Address(value) && !isValidIPv6Address(value) && !isValidDomainName(value))
|
||||
{
|
||||
throw SyntaxException("Property is not a valid host name", value);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool AbstractConfiguration::isValidIPv4Address(const std::string& value)
|
||||
{
|
||||
using Poco::Net::Impl::IPv4AddressImpl;
|
||||
IPv4AddressImpl empty4 = IPv4AddressImpl();
|
||||
|
||||
IPv4AddressImpl ipAddress = IPv4AddressImpl::parse(value);
|
||||
return ipAddress != empty4 || value == "0.0.0.0";
|
||||
}
|
||||
|
||||
|
||||
bool AbstractConfiguration::isValidIPv6Address(const std::string& value)
|
||||
{
|
||||
#if defined(POCO_HAVE_IPv6)
|
||||
using Poco::Net::Impl::IPv6AddressImpl;
|
||||
IPv6AddressImpl empty6 = IPv6AddressImpl();
|
||||
|
||||
IPv6AddressImpl ipAddress = IPv6AddressImpl::parse(value);
|
||||
return ipAddress != empty6 || value == "::";
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
bool AbstractConfiguration::isValidDomainName(const std::string& value)
|
||||
{
|
||||
if (value.empty() || value == "." || value.length() > 253)
|
||||
return false;
|
||||
int labelLength = 0;
|
||||
char oldChar = 0;
|
||||
|
||||
for (char ch : value)
|
||||
{
|
||||
if (ch == '.')
|
||||
{
|
||||
if (labelLength == 0 || labelLength > 63 || oldChar == '-')
|
||||
return false;
|
||||
labelLength = 0;
|
||||
}
|
||||
else if (isalnum(ch) || ch == '-')
|
||||
{
|
||||
if (labelLength == 0 && (ch == '-' || isdigit(ch)))
|
||||
return false;
|
||||
++labelLength;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
oldChar = ch;
|
||||
}
|
||||
return oldChar == '.' || (labelLength > 0 && labelLength <= 63 && oldChar != '-');
|
||||
}
|
||||
|
||||
|
||||
} } // namespace Poco::Util
|
||||
|
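A hedged usage sketch (not from the patch) of the new getHost() accessors; the MapConfiguration keys and values are invented for illustration:

    #include <Poco/AutoPtr.h>
    #include <Poco/Exception.h>
    #include <Poco/Util/MapConfiguration.h>
    #include <iostream>

    int main()
    {
        Poco::AutoPtr<Poco::Util::MapConfiguration> config = new Poco::Util::MapConfiguration;
        config->setString("listen_host", "127.0.0.1");
        config->setString("bad_host", "not a host!");

        std::cout << config->getHost("listen_host") << "\n";          // returns the validated value
        std::cout << config->getHost("missing", "localhost") << "\n"; // default value, validated as a domain name
        try
        {
            config->getHost("bad_host"); // neither an IP address nor a domain name
        }
        catch (const Poco::SyntaxException & e)
        {
            std::cout << "rejected: " << e.displayText() << "\n";
        }
    }
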
@ -2,11 +2,11 @@
|
||||
|
||||
# NOTE: VERSION_REVISION has nothing common with DBMS_TCP_PROTOCOL_VERSION,
|
||||
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
|
||||
SET(VERSION_REVISION 54489)
|
||||
SET(VERSION_REVISION 54491)
|
||||
SET(VERSION_MAJOR 24)
|
||||
SET(VERSION_MINOR 8)
|
||||
SET(VERSION_MINOR 10)
|
||||
SET(VERSION_PATCH 1)
|
||||
SET(VERSION_GITHASH 3f8b27d7accd2b5ec4afe7d0dd459115323304af)
|
||||
SET(VERSION_DESCRIBE v24.8.1.1-testing)
|
||||
SET(VERSION_STRING 24.8.1.1)
|
||||
SET(VERSION_GITHASH b12a367741812f9e5fe754d19ebae600e2a2614c)
|
||||
SET(VERSION_DESCRIBE v24.10.1.1-testing)
|
||||
SET(VERSION_STRING 24.10.1.1)
|
||||
# end of autochange
|
||||
|
@ -12,11 +12,13 @@ macro(add_headers_only prefix common_path)
|
||||
add_glob(${prefix}_headers ${common_path}/*.h)
|
||||
endmacro()
|
||||
|
||||
# Assumes the path is under src and that src/ needs to be removed (valid for any subdirectory under src/)
|
||||
macro(extract_into_parent_list src_list dest_list)
|
||||
list(REMOVE_ITEM ${src_list} ${ARGN})
|
||||
get_filename_component(__dir_name ${CMAKE_CURRENT_SOURCE_DIR} NAME)
|
||||
file(RELATIVE_PATH relative ${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
string(REPLACE "src/" "" relative ${relative})
|
||||
foreach(file IN ITEMS ${ARGN})
|
||||
list(APPEND ${dest_list} ${__dir_name}/${file})
|
||||
list(APPEND ${dest_list} ${relative}/${file})
|
||||
endforeach()
|
||||
set(${dest_list} "${${dest_list}}" PARENT_SCOPE)
|
||||
endmacro()
|
||||
|
@ -9,10 +9,18 @@ endif ()
|
||||
file(GLOB bprefix "/usr/local/llvm${COMPILER_VERSION_MAJOR}/lib/clang/${COMPILER_VERSION_MAJOR}/lib/${system_processor}-portbld-freebsd*/")
|
||||
message(STATUS "-Bprefix: ${bprefix}")
|
||||
|
||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -Bprefix=${bprefix} --print-file-name=libclang_rt.builtins-${system_processor}.a OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
execute_process(COMMAND
|
||||
${CMAKE_CXX_COMPILER} -Bprefix=${bprefix} --print-file-name=libclang_rt.builtins-${system_processor}.a
|
||||
OUTPUT_VARIABLE BUILTINS_LIBRARY
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
# --print-file-name simply prints what you passed in case nothing was resolved, so let's try one other possible option
|
||||
if (BUILTINS_LIBRARY STREQUAL "libclang_rt.builtins-${system_processor}.a")
|
||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -Bprefix=${bprefix} --print-file-name=libclang_rt.builtins.a OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
execute_process(COMMAND
|
||||
${CMAKE_CXX_COMPILER} -Bprefix=${bprefix} --print-file-name=libclang_rt.builtins.a
|
||||
OUTPUT_VARIABLE BUILTINS_LIBRARY
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif()
|
||||
if (BUILTINS_LIBRARY STREQUAL "libclang_rt.builtins.a")
|
||||
message(FATAL_ERROR "libclang_rt.builtins had not been found")
|
||||
|
@ -8,4 +8,7 @@ set (CMAKE_CXX_COMPILER_TARGET "x86_64-pc-freebsd11")
|
||||
set (CMAKE_ASM_COMPILER_TARGET "x86_64-pc-freebsd11")
|
||||
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-x86_64")
|
||||
|
||||
# dprintf is used in a patched version of replxx
|
||||
add_compile_definitions(_WITH_DPRINTF)
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake
|
||||
|
@ -42,19 +42,9 @@ endif ()
|
||||
# But use 2 parallel jobs, since:
|
||||
# - this is what llvm does
|
||||
# - and I've verified that lld-11 does not use all available CPU time (in peak) while linking one binary
|
||||
if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND ENABLE_THINLTO)
|
||||
if (ARCH_AARCH64)
|
||||
# aarch64 builds often start to fail with OOMs (reason not yet clear), for now let's limit the concurrency
|
||||
message(STATUS "ThinLTO provides its own parallel linking - limiting parallel link jobs to 1.")
|
||||
set (PARALLEL_LINK_JOBS 1)
|
||||
if (LINKER_NAME MATCHES "lld")
|
||||
math(EXPR LTO_JOBS ${NUMBER_OF_LOGICAL_CORES}/4)
|
||||
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -Wl,--thinlto-jobs=${LTO_JOBS}")
|
||||
endif()
|
||||
elseif (PARALLEL_LINK_JOBS GREATER 2)
|
||||
message(STATUS "ThinLTO provides its own parallel linking - limiting parallel link jobs to 2.")
|
||||
set (PARALLEL_LINK_JOBS 2)
|
||||
endif ()
|
||||
if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND ENABLE_THINLTO AND PARALLEL_LINK_JOBS GREATER 2)
|
||||
message(STATUS "ThinLTO provides its own parallel linking - limiting parallel link jobs to 2.")
|
||||
set (PARALLEL_LINK_JOBS 2)
|
||||
endif()
|
||||
|
||||
message(STATUS "Building sub-tree with ${PARALLEL_COMPILE_JOBS} compile jobs and ${PARALLEL_LINK_JOBS} linker jobs (system: ${NUMBER_OF_LOGICAL_CORES} cores, ${TOTAL_PHYSICAL_MEMORY} MB RAM, 'OFF' means the native core count).")
|
||||
|
@ -5,7 +5,11 @@ set (DEFAULT_LIBS "-nodefaultlibs")
|
||||
|
||||
# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
|
||||
# See https://bugs.llvm.org/show_bug.cgi?id=16404
|
||||
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
execute_process (COMMAND
|
||||
${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt
|
||||
OUTPUT_VARIABLE BUILTINS_LIBRARY
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
|
||||
# Apparently, in clang-19, the UBSan support library for C++ was moved out into ubsan_standalone_cxx.a, so we have to include both.
|
||||
if (SANITIZE STREQUAL undefined)
|
||||
|
@ -18,4 +18,4 @@ set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
|
||||
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
|
||||
|
||||
set (USE_MUSL 1)
|
||||
add_definitions(-DUSE_MUSL=1)
|
||||
add_definitions(-DUSE_MUSL=1 -D__MUSL__=1)
|
||||
|
@ -57,8 +57,8 @@ option(WITH_COVERAGE "Instrumentation for code coverage with default implementat
|
||||
|
||||
if (WITH_COVERAGE)
|
||||
message (STATUS "Enabled instrumentation for code coverage")
|
||||
set(COVERAGE_FLAGS "SHELL:-fprofile-instr-generate -fcoverage-mapping")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
|
||||
set (COVERAGE_FLAGS -fprofile-instr-generate -fcoverage-mapping)
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
|
||||
endif()
|
||||
|
||||
option (SANITIZE_COVERAGE "Instrumentation for code coverage with custom callbacks" OFF)
|
||||
|
@ -5,7 +5,11 @@ if (NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
endif ()
|
||||
|
||||
# Print details to output
|
||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE COMPILER_SELF_IDENTIFICATION OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version
|
||||
OUTPUT_VARIABLE COMPILER_SELF_IDENTIFICATION
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")
|
||||
|
||||
# Require minimum compiler versions
|
||||
|
@ -90,7 +90,10 @@ endfunction()
|
||||
|
||||
# Function get_cmake_properties returns the list of all properties that cmake supports
|
||||
function(get_cmake_properties outvar)
|
||||
execute_process(COMMAND cmake --help-property-list OUTPUT_VARIABLE cmake_properties)
|
||||
execute_process(COMMAND cmake --help-property-list
|
||||
OUTPUT_VARIABLE cmake_properties
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
# Convert command output into a CMake list
|
||||
string(REGEX REPLACE ";" "\\\\;" cmake_properties "${cmake_properties}")
|
||||
string(REGEX REPLACE "\n" ";" cmake_properties "${cmake_properties}")
|
||||
|
25
contrib/CMakeLists.txt
vendored
@ -71,7 +71,6 @@ add_contrib (zlib-ng-cmake zlib-ng)
|
||||
add_contrib (bzip2-cmake bzip2)
|
||||
add_contrib (minizip-ng-cmake minizip-ng)
|
||||
add_contrib (snappy-cmake snappy)
|
||||
add_contrib (rocksdb-cmake rocksdb)
|
||||
add_contrib (thrift-cmake thrift)
|
||||
# parquet/arrow/orc
|
||||
add_contrib (arrow-cmake arrow) # requires: snappy, thrift, double-conversion
|
||||
@ -146,8 +145,14 @@ add_contrib (isa-l-cmake isa-l)
|
||||
add_contrib (libhdfs3-cmake libhdfs3) # requires: google-protobuf, krb5, isa-l
|
||||
add_contrib (hive-metastore-cmake hive-metastore) # requires: thrift, avro, arrow, libhdfs3
|
||||
add_contrib (cppkafka-cmake cppkafka)
|
||||
add_contrib (libpqxx-cmake libpqxx)
|
||||
add_contrib (libpq-cmake libpq)
|
||||
|
||||
option(ENABLE_LIBPQXX "Enable PostgreSQL" ${ENABLE_LIBRARIES})
|
||||
if (ENABLE_LIBPQXX)
|
||||
add_contrib (postgres-cmake postgres)
|
||||
add_contrib (libpqxx-cmake libpqxx)
|
||||
endif()
|
||||
|
||||
add_contrib (rocksdb-cmake rocksdb) # requires: jemalloc, snappy, zlib, lz4, zstd, liburing
|
||||
add_contrib (nuraft-cmake NuRaft)
|
||||
add_contrib (fast_float-cmake fast_float)
|
||||
add_contrib (idna-cmake idna)
|
||||
@ -155,6 +160,12 @@ add_contrib (datasketches-cpp-cmake datasketches-cpp)
|
||||
add_contrib (incbin-cmake incbin)
|
||||
add_contrib (sqids-cpp-cmake sqids-cpp)
|
||||
|
||||
option(USE_MONGODB "Enable MongoDB support" ${ENABLE_LIBRARIES})
|
||||
if (USE_MONGODB)
|
||||
add_contrib (mongo-c-driver-cmake mongo-c-driver) # requires: zlib
|
||||
add_contrib (mongo-cxx-driver-cmake mongo-cxx-driver) # requires: libmongoc, libbson
|
||||
endif()
|
||||
|
||||
option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES})
|
||||
if (ENABLE_NLP)
|
||||
add_contrib (libstemmer-c-cmake libstemmer_c)
|
||||
@ -179,7 +190,7 @@ else()
|
||||
message(STATUS "Not using QPL")
|
||||
endif ()
|
||||
|
||||
if (OS_LINUX AND ARCH_AMD64)
|
||||
if (OS_LINUX AND ARCH_AMD64 AND NOT NO_SSE3_OR_HIGHER)
|
||||
option (ENABLE_QATLIB "Enable Intel® QuickAssist Technology Library (QATlib)" ${ENABLE_LIBRARIES})
|
||||
elseif(ENABLE_QATLIB)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "QATLib is only supported on x86_64")
|
||||
@ -205,14 +216,12 @@ add_contrib (morton-nd-cmake morton-nd)
|
||||
if (ARCH_S390X)
|
||||
add_contrib(crc32-s390x-cmake crc32-s390x)
|
||||
endif()
|
||||
add_contrib (annoy-cmake annoy)
|
||||
|
||||
option(ENABLE_USEARCH "Enable USearch (Approximate Neighborhood Search, HNSW) support" ${ENABLE_LIBRARIES})
|
||||
option(ENABLE_USEARCH "Enable USearch" ${ENABLE_LIBRARIES})
|
||||
if (ENABLE_USEARCH)
|
||||
add_contrib (FP16-cmake FP16)
|
||||
add_contrib (robin-map-cmake robin-map)
|
||||
add_contrib (SimSIMD-cmake SimSIMD)
|
||||
add_contrib (usearch-cmake usearch) # requires: FP16, robin-map, SimdSIMD
|
||||
add_contrib (usearch-cmake usearch) # requires: FP16, SimdSIMD
|
||||
else ()
|
||||
message(STATUS "Not using USearch")
|
||||
endif ()
|
||||
|
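Note: the contrib/CMakeLists.txt hunks above move libpq/libpqxx and the new MongoDB drivers behind per-feature options. This is the usual CMake gating pattern; a sketch of how it reads in isolation (the library names come from the hunks, the configure command in the comment is illustrative):

    # Each optional bundled library defaults to the global ENABLE_LIBRARIES
    # switch and can be overridden individually at configure time, e.g.:
    #   cmake -DENABLE_LIBPQXX=OFF -DUSE_MONGODB=OFF ..
    option(ENABLE_LIBPQXX "Enable PostgreSQL" ${ENABLE_LIBRARIES})
    if (ENABLE_LIBPQXX)
        add_contrib (postgres-cmake postgres)
        add_contrib (libpqxx-cmake libpqxx)
    endif()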
@ -27,7 +27,7 @@ if (ENABLE_QAT_OUT_OF_TREE_BUILD)
${QAT_AL_INCLUDE_DIR}
${QAT_USDM_INCLUDE_DIR}
${ZSTD_LIBRARY_DIR})
target_compile_definitions(_qatzstd_plugin PRIVATE -DDEBUGLEVEL=0 PUBLIC -DENABLE_ZSTD_QAT_CODEC)
target_compile_definitions(_qatzstd_plugin PRIVATE -DDEBUGLEVEL=0)
add_library (ch_contrib::qatzstd_plugin ALIAS _qatzstd_plugin)
else () # In-tree build
message(STATUS "Intel QATZSTD in-tree build")
@ -78,7 +78,7 @@ else () # In-tree build
${QAT_USDM_INCLUDE_DIR}
${ZSTD_LIBRARY_DIR}
${LIBQAT_HEADER_DIR})
target_compile_definitions(_qatzstd_plugin PRIVATE -DDEBUGLEVEL=0 PUBLIC -DENABLE_ZSTD_QAT_CODEC -DINTREE)
target_compile_definitions(_qatzstd_plugin PRIVATE -DDEBUGLEVEL=0 PUBLIC -DINTREE)
target_include_directories(_qatzstd_plugin SYSTEM PUBLIC $<BUILD_INTERFACE:${QATZSTD_SRC_DIR}> $<INSTALL_INTERFACE:include>)
add_library (ch_contrib::qatzstd_plugin ALIAS _qatzstd_plugin)
endif ()
2
contrib/SimSIMD
vendored
2
contrib/SimSIMD
vendored
@ -1 +1 @@
Subproject commit de2cb75b9e9e3389d5e1e51fd9f8ed151f3c17cf
Subproject commit 91a76d1ac519b3b9dc8957734a3dabd985f00c26
1
contrib/annoy
vendored
1
contrib/annoy
vendored
@ -1 +0,0 @@
Subproject commit f2ac8e7b48f9a9cf676d3b58286e5455aba8e956
@ -1,24 +0,0 @@
option(ENABLE_ANNOY "Enable Annoy index support" ${ENABLE_LIBRARIES})

# Annoy index should be disabled with undefined sanitizer. Because of memory storage optimizations
# (https://github.com/ClickHouse/annoy/blob/9d8a603a4cd252448589e84c9846f94368d5a289/src/annoylib.h#L442-L463)
# UBSan fails and leads to crash. Simmilar issue is already opened in Annoy repo
# https://github.com/spotify/annoy/issues/456
# Problem with aligment can lead to errors like
# (https://stackoverflow.com/questions/46790550/c-undefined-behavior-strict-aliasing-rule-or-incorrect-alignment)
# or will lead to crash on arm https://developer.arm.com/documentation/ka003038/latest
# This issues should be resolved before annoy became non-experimental (--> setting "allow_experimental_annoy_index")
if ((NOT ENABLE_ANNOY) OR (SANITIZE STREQUAL "undefined") OR (ARCH_AARCH64))
message (STATUS "Not using annoy")
return()
endif()

set(ANNOY_PROJECT_DIR "${ClickHouse_SOURCE_DIR}/contrib/annoy")
set(ANNOY_SOURCE_DIR "${ANNOY_PROJECT_DIR}/src")

add_library(_annoy INTERFACE)
target_include_directories(_annoy SYSTEM INTERFACE ${ANNOY_SOURCE_DIR})

add_library(ch_contrib::annoy ALIAS _annoy)
target_compile_definitions(_annoy INTERFACE ENABLE_ANNOY)
target_compile_definitions(_annoy INTERFACE ANNOYLIB_MULTITHREADED_BUILD)
2
contrib/aws
vendored
2
contrib/aws
vendored
@ -1 +1 @@
Subproject commit 1c2946bfcb7f1e3ae0a858de0b59d4f1a7b4ccaf
Subproject commit d5450d76abda556ce145ddabe7e0cc6a7644ec59
2
contrib/aws-crt-cpp
vendored
2
contrib/aws-crt-cpp
vendored
@ -1 +1 @@
Subproject commit f532d6abc0d2b0d8b5d6fe9e7c51eaedbe4afbd0
Subproject commit e5aa45cacfdcda7719ead38760e7c61076f5745f
@ -37,7 +37,9 @@ message(STATUS "Packaging with tzdata version: ${TZDATA_VERSION}")
execute_process(COMMAND
bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | LC_ALL=C sort | paste -sd ';' -"
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE TIMEZONES)
OUTPUT_VARIABLE TIMEZONES
COMMAND_ERROR_IS_FATAL ANY
)

file(APPEND ${TIMEZONES_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n")
file(APPEND ${TIMEZONES_FILE} "#include <incbin.h>\n")
2
contrib/curl
vendored
2
contrib/curl
vendored
@ -1 +1 @@
Subproject commit de7b3e89218467159a7af72d58cea8425946e97d
Subproject commit 83bedbd730d62b83744cc26fa0433d3f6e2e4cd6
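Note: the bash pipeline in the cctz hunk above emits the timezone file names joined with ';', so the captured TIMEZONES variable is already a CMake list. A small sketch of consuming it (purely illustrative; the real generated file embeds each entry with incbin):

    # TIMEZONES is "Africa/Abidjan;Africa/Accra;..." after the pipeline above,
    # so plain list/foreach operations iterate over the individual names.
    list(LENGTH TIMEZONES TIMEZONES_COUNT)
    message(STATUS "Embedding ${TIMEZONES_COUNT} timezones")
    foreach (TIMEZONE IN LISTS TIMEZONES)
        message(VERBOSE "  ${TIMEZONE}")
    endforeach()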
@ -359,7 +359,9 @@ else ()

execute_process(
COMMAND mkdir -p ${PROTOC_BUILD_DIR}
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

execute_process(
COMMAND ${CMAKE_COMMAND}
@ -375,11 +377,15 @@ else ()
"-DABSL_ENABLE_INSTALL=0"
"${protobuf_source_dir}"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

execute_process(
COMMAND ${CMAKE_COMMAND} --build "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)
endif ()

add_executable(protoc IMPORTED GLOBAL)
2
contrib/grpc
vendored
2
contrib/grpc
vendored
@ -1 +1 @@
Subproject commit 1716359d2e28d304a250f9df0e6c0ccad03de8db
Subproject commit 7bc3abe952aba1dc7bce7f2f790dc781cb51a41e
@ -51,8 +51,9 @@ if (NOT CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
set(OPENSSL_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/openssl-cmake")

execute_process(
COMMAND mkdir -p ${OPENSSL_BUILD_DIR}
COMMAND_ECHO STDOUT
COMMAND mkdir -p ${OPENSSL_BUILD_DIR}
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

if (CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|x86_64")
@ -89,15 +90,21 @@ if (NOT CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
"-DClickHouse_SOURCE_DIR=${ClickHouse_SOURCE_DIR}"
"${OPENSSL_SOURCE_DIR}"
WORKING_DIRECTORY "${OPENSSL_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

execute_process(
COMMAND ${CMAKE_COMMAND} --build "${OPENSSL_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

execute_process(
COMMAND ${CMAKE_COMMAND} --install "${OPENSSL_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

# It's not important on which file we depend, we just want to specify right order
add_library(openssl_for_grpc STATIC IMPORTED GLOBAL)
@ -108,8 +115,9 @@ if (NOT CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
set (GRPC_CPP_PLUGIN_BUILD_DIR "${_gRPC_BINARY_DIR}/build")

execute_process(
COMMAND mkdir -p ${GRPC_CPP_PLUGIN_BUILD_DIR}
COMMAND_ECHO STDOUT
COMMAND mkdir -p ${GRPC_CPP_PLUGIN_BUILD_DIR}
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

set(abseil_source_dir "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp")
@ -140,11 +148,15 @@ if (NOT CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
"-DgRPC_SSL_PROVIDER=package"
"${_gRPC_SOURCE_DIR}"
WORKING_DIRECTORY "${GRPC_CPP_PLUGIN_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

execute_process(
COMMAND ${CMAKE_COMMAND} --build "${GRPC_CPP_PLUGIN_BUILD_DIR}"
COMMAND_ECHO STDOUT)
COMMAND_ECHO STDOUT
COMMAND_ERROR_IS_FATAL ANY
)

add_executable(grpc_cpp_plugin IMPORTED GLOBAL)
set_target_properties (grpc_cpp_plugin PROPERTIES IMPORTED_LOCATION "${GRPC_CPP_PLUGIN_BUILD_DIR}/grpc_cpp_plugin")
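Note: the protobuf and gRPC hunks above all touch the same cross-compilation helper: a native build directory is configured and built at configure time so that protoc and grpc_cpp_plugin can run on the build host. A hedged sketch of that host-tool pattern; the directory variable and paths are made up for the sketch, not the ones used in the tree:

    # Configure and build a separate native (host) CMake project at configure
    # time, then import the produced binary as an executable target.
    set(HOST_PROTOC_BUILD_DIR "${CMAKE_BINARY_DIR}/native-protoc")
    execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory "${HOST_PROTOC_BUILD_DIR}"
        COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY)
    execute_process(COMMAND ${CMAKE_COMMAND} -S "${protobuf_source_dir}" -B "${HOST_PROTOC_BUILD_DIR}"
        COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY)
    execute_process(COMMAND ${CMAKE_COMMAND} --build "${HOST_PROTOC_BUILD_DIR}"
        COMMAND_ECHO STDOUT COMMAND_ERROR_IS_FATAL ANY)
    add_executable(protoc IMPORTED GLOBAL)
    set_target_properties(protoc PROPERTIES IMPORTED_LOCATION "${HOST_PROTOC_BUILD_DIR}/protoc")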
2
contrib/icu
vendored
2
contrib/icu
vendored
@ -1 +1 @@
Subproject commit 7750081bda4b3bc1768ae03849ec70f67ea10625
Subproject commit 4216173eeeb39c1d4caaa54a68860e800412d273
@ -15,162 +15,64 @@ set(ICUDATA_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/icudata/")
|
||||
# These lists of sources were generated from build log of the original ICU build system (configure + make).
|
||||
|
||||
set(ICUUC_SOURCES
|
||||
"${ICU_SOURCE_DIR}/common/errorcode.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/putil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umath.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utypes.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uinvchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umutex.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucln_cmn.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uinit.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uobject.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cmemory.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/charstr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cstr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucmndata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udatamem.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umapfile.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udataswp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie_swap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucol_swp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrace.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uhash.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uhash_us.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uenum.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrenum.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvector.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustack.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvectr32.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvectr64.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_bld.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_io.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_cb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_err.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvlat1.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u7.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u8.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u16.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u32.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvscsu.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvbocu.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_ext.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvmbcs.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv2022.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvhz.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_lmb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvisci.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvdisp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_set.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_ct.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resource.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uresbund.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ures_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uresdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resbund.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resbund_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucurr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localeprioritylist.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/messagepattern.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucat.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locmap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locid.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locutil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locavailable.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdispnames.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdspnm.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loclikely.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locresdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/lsr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loclikelysubtags.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdistance.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localematcher.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestream.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/stringpiece.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytesinkutil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/stringtriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestrieiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstrieiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/dictionarydata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/edits.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/appendable.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_case.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utf_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustring.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrcase.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucasemap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucasemap_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cstring.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrtrns.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_wcs.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utext.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_case_locale.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrcase_locale.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normalizer2impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normalizer2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/filterednormalizer2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normlzr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unorm.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unormcmp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loadednormalizer2impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/chariter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/schriter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uchriter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/patternprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucase.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/propname.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidi_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/characterproperties.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidi.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidiwrt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidiln.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ushape.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uscript.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uscript_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usc_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unames.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie2_builder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucptrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umutablecptrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bmpset.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unisetspan.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uset_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset_closure.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uset.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usetiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ruleiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/caniter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifilt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifunct.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uarrsort.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubrk.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/brkeng.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytesinkutil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestream.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/bytestrieiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/caniter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/characterproperties.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/chariter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/charstr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cmemory.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cstr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cstring.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cwchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/dictbe.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/dictionarydata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/dtintrv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/edits.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/emojiprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/errorcode.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/filteredbrk.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/filterednormalizer2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/icudataver.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/icuplug.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loadednormalizer2impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localematcher.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/localeprioritylist.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locavailable.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locbased.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdispnames.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdistance.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locdspnm.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locid.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loclikely.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/loclikelysubtags.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locmap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locresdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locutil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/lsr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/lstmbe.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/messagepattern.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/mlbe.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normalizer2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normalizer2impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/normlzr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/parsepos.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/patternprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/pluralmap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/propname.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/propsvec.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/punycode.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/putil.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbi.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbi_cache.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbidata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbinode.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbirb.cpp"
|
||||
@ -178,166 +80,180 @@ set(ICUUC_SOURCES
|
||||
"${ICU_SOURCE_DIR}/common/rbbisetb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbistbl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbitblb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/rbbi_cache.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resbund.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resbund_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/resource.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/restrace.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ruleiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/schriter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/serv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servnotf.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servls.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servlk.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servlkf.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servls.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servnotf.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servrbf.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/servslkf.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uidna.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usprep.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uts46.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/punycode.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/util.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/util_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/parsepos.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/locbased.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/cwchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/wintz.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/dtintrv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvsel.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/propsvec.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ulist.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc_tag.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/icudataver.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/icuplug.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/sharedobject.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/simpleformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifiedcache.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc_keytype.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubiditransform.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/pluralmap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/static_unicode_sets.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/restrace.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/emojiprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/lstmbe.cpp")
|
||||
"${ICU_SOURCE_DIR}/common/stringpiece.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/stringtriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uarrsort.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidi.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidi_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidiln.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubiditransform.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubidiwrt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ubrk.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucase.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucasemap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucasemap_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucat.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstriebuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucharstrieiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uchriter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucln_cmn.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucmndata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv2022.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_bld.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_cb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_ct.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_err.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_ext.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_io.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_lmb.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_set.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u16.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u32.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u7.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnv_u8.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvbocu.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvdisp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvhz.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvisci.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvlat1.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvmbcs.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvscsu.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucnvsel.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucol_swp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucptrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ucurr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udatamem.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/udataswp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uenum.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uhash.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uhash_us.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uidna.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uinit.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uinvchar.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ulist.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc_keytype.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uloc_tag.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ulocale.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ulocbuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umapfile.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umath.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umutablecptrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/umutex.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unames.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifiedcache.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifilt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unifunct.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset_closure.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uniset_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unisetspan.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_case.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_case_locale.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unistr_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unorm.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/unormcmp.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uobject.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ures_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uresbund.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uresdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usc_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uscript.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uscript_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uset.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uset_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usetiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ushape.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/usprep.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustack.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_titlecase_brkiter.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustr_wcs.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrcase.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrcase_locale.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrenum.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustring.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/ustrtrns.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utext.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utf_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/util.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/util_props.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrace.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie2.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie2_builder.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utrie_swap.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uts46.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/utypes.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvector.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvectr32.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/uvectr64.cpp"
|
||||
"${ICU_SOURCE_DIR}/common/wintz.cpp")
|
||||
|
||||
set(ICUI18N_SOURCES
|
||||
"${ICU_SOURCE_DIR}/i18n/ucln_in.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fmtable.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/format.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/msgfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/umsg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unum.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decimfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dcfmtsym.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fmtable_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/choicfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/datefmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/smpdtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/reldtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtfmtsym.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtptngen.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udatpg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfrs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfsubs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbnf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numsys.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unumsys.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucsdet.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/calendar.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/gregocal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/timezone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/simpletz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/olsontz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/alphaindex.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/anytrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/astro.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/taiwncal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/basictz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/bocsu.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/brktrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/buddhcal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/persncal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/islamcal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/japancal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/gregoimp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/hebrwcal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/indiancal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/chnsecal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/calendar.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/casetrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/cecal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/coptccal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dangical.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ethpccal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/chnsecal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/choicfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/coleitr.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/coll.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/sortkey.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/bocsu.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucoleitr.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol_res.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol_sit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collation.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationsettings.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationbuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationcompare.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationtailoring.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationdatabuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationdatareader.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationdatawriter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationfastlatin.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationfastlatinbuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationfcd.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utf16collationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utf8collationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uitercollationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationsets.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationcompare.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationfastlatin.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationkeys.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rulebasedcollator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationroot.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationrootelements.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationdatabuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationweights.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationruleparser.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationbuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationfastlatinbuilder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/listformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ulistformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/strmatch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/usearch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/search.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/stsearch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/translit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/esctrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unesctrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/funcrepl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/strrepl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tridpars.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationsets.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationsettings.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationtailoring.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/collationweights.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/compactdecimalformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/coptccal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/cpdtrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_data.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_pars.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_rule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_set.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nultrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/remtrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/casetrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/titletrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tolowtrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/toupptrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/anytrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/name2uni.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uni2name.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nortrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/quant.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/transreg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/brktrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regexcmp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rematch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/repattrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regexst.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regextxt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regeximp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregex.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregexc.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ulocdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/curramt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currunit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measure.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utmscale.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/csdetect.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/csmatch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/csr2022.cpp"
|
||||
@ -346,60 +262,80 @@ set(ICUI18N_SOURCES
|
||||
"${ICU_SOURCE_DIR}/i18n/csrsbcs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/csrucode.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/csrutf8.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/inputext.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/wintzimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/windtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/winnmfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/basictz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbtz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tztrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/vtzone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/zonemeta.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/standardplural.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/upluralrules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/plurrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/plurfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/selfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/curramt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currpinf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currunit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dangical.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/datefmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dayperiodrules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dcfmtsym.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decContext.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decNumber.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decimfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/displayoptions.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-bignum-dtoa.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-bignum.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-cached-powers.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-double-to-string.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-fast-dtoa.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-string-to-double.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-strtod.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtfmtsym.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtitvfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtitvinf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udateintervalformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmunit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmutamt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmutfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/currpinf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_build.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_conf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/smpdtfst.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ztrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/zrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/vzone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtptngen.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dtrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/erarules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/esctrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ethpccal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fmtable.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fmtable_cnv.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/format.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formatted_string_builder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedval_iterimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedval_sbimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedvalue.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fphdlimp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/fpositer.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ufieldpositer.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decNumber.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/decContext.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/alphaindex.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tznames.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tznames_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzgnames.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/compactdecimalformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/funcrepl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/gender.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/region.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/scriptset.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregion.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/reldatefmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/quantityformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/gregocal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/gregoimp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/hebrwcal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/indiancal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/inputext.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/islamcal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/iso8601cal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/japancal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/listformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measunit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/sharedbreakiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/scientificnumberformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/dayperiodrules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measunit_extra.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measure.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_arguments.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_checker.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_data_model.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_errors.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_evaluation.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_formattable.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_formatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_function_registry.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_parser.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/messageformat2_serializer.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/msgfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/name2uni.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfrs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nfsubs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nortrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/nultrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_affixutils.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_asformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_capi.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_compact.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_currencysymbols.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_decimalquantity.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_decimfmtprops.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_fluent.cpp"
|
||||
@ -407,7 +343,9 @@ set(ICUI18N_SOURCES
|
||||
"${ICU_SOURCE_DIR}/i18n/number_grouping.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_integerwidth.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_longnames.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_mapper.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_modifiers.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_multiplier.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_notation.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_output.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_padding.cpp"
|
||||
@ -415,46 +353,125 @@ set(ICUI18N_SOURCES
|
||||
"${ICU_SOURCE_DIR}/i18n/number_patternstring.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_rounding.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_scientific.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_utils.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_asformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_mapper.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_multiplier.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_currencysymbols.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_simple.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_skeletons.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_capi.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-string-to-double.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-double-to-string.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-bignum-dtoa.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-bignum.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-cached-powers.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-fast-dtoa.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/double-conversion-strtod.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/string_segment.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_parsednumber.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_symbols.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_decimal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_scientific.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_currency.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_affixes.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_compositions.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_validators.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numrange_fluent.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numrange_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/erarules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedvalue.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedval_iterimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formattedval_sbimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/formatted_string_builder.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/measunit_extra.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_symbolswrapper.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_usageprefs.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/number_utils.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_affixes.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_compositions.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_currency.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_decimal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_parsednumber.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_scientific.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_symbols.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numparse_validators.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numrange_capi.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numrange_fluent.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numrange_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/numsys.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/olsontz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/persncal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/pluralranges.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/plurfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/plurrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/quant.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/quantityformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbnf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_data.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_pars.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_rule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbt_set.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rbtz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regexcmp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regeximp.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regexst.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/regextxt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/region.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/reldatefmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/reldtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rematch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/remtrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/repattrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/rulebasedcollator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/scientificnumberformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/scriptset.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/search.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/selfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/sharedbreakiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/simpletz.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/smpdtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/smpdtfst.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/sortkey.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/standardplural.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/string_segment.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/strmatch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/strrepl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/stsearch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/taiwncal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/timezone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/titletrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmunit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmutamt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tmutfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tolowtrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/toupptrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/translit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/transreg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tridpars.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzgnames.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tznames.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tznames_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tzrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/tztrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucal.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucln_in.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol_res.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucol_sit.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucoleitr.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ucsdet.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udateintervalformat.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/udatpg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ufieldpositer.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uitercollationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ulistformatter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ulocdata.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/umsg.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unesctrn.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uni2name.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/units_complexconverter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/units_converter.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/units_data.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/units_router.cpp")
|
||||
"${ICU_SOURCE_DIR}/i18n/units_router.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unum.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/unumsys.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/upluralrules.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregex.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregexc.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uregion.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/usearch.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_build.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_conf.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/uspoof_impl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utf16collationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utf8collationiterator.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utmscale.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/utrans.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/vtzone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/vzone.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/windtfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/winnmfmt.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/wintzimpl.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/zonemeta.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/zrule.cpp"
|
||||
"${ICU_SOURCE_DIR}/i18n/ztrans.cpp")

file(GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/empty.cpp" CONTENT " ")
enable_language(ASM)
@ -464,6 +481,11 @@ if (ARCH_S390X)
else()
set(ICUDATA_SOURCE_FILE "${ICUDATA_SOURCE_DIR}/icudt75l_dat.S" )
endif()
# ^^ you might be confused how for different little endian platforms (x86, ARM) the same assembly files can be used.
# These files are indeed assembly but they only contain data ('.long' directive), which makes them portable accross CPUs.
# Only the endianness and the character set (ASCII, EBCDIC) makes a difference, also see
# https://unicode-org.github.io/icu/userguide/icu_data/#sharing-icu-data-between-platforms, 'Sharing ICU Data Between Platforms')
# (and as an experiment, try re-generating the data files on x86 vs. ARM, ... you'll get exactly the same files)

set(ICUDATA_SOURCES
"${ICUDATA_SOURCE_FILE}"
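Note: the comment in the hunk above explains why one prebuilt ICU data file can serve every little-endian target. A minimal sketch of the selection logic under that assumption; the big-endian file name below is illustrative, since only the little-endian name appears in the hunk:

    # The prebuilt ICU data is an assembly file containing only data
    # directives, so one little-endian file covers x86_64 and AArch64 alike;
    # big-endian s390x needs its own file.
    enable_language(ASM)
    if (ARCH_S390X)
        set(ICUDATA_SOURCE_FILE "${ICUDATA_SOURCE_DIR}/icudt75b_dat.S")   # hypothetical big-endian name
    else()
        set(ICUDATA_SOURCE_FILE "${ICUDATA_SOURCE_DIR}/icudt75l_dat.S")   # little-endian, as in the hunk
    endif()
    add_library(_icudata "${ICUDATA_SOURCE_FILE}")                        # illustrative target name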
2
contrib/libarchive
vendored
2
contrib/libarchive
vendored
@ -1 +1 @@
Subproject commit ee45796171324519f0c0bfd012018dd099296336
Subproject commit 313aa1fa10b657de791e3202c168a6c833bc3543
@ -1,6 +1,6 @@
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/libarchive")

set(SRCS
set(SRCS
"${LIBRARY_DIR}/libarchive/archive_acl.c"
"${LIBRARY_DIR}/libarchive/archive_blake2sp_ref.c"
"${LIBRARY_DIR}/libarchive/archive_blake2s_ref.c"
@ -135,7 +135,7 @@ set(SRCS
)

add_library(_libarchive ${SRCS})
target_include_directories(_libarchive PUBLIC
target_include_directories(_libarchive PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
"${LIBRARY_DIR}/libarchive"
)
@ -157,7 +157,7 @@ if (TARGET ch_contrib::zlib)
endif()

if (TARGET ch_contrib::zstd)
target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_H=1 HAVE_LIBZSTD=1 HAVE_LIBZSTD_COMPRESSOR=1)
target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_H=1 HAVE_LIBZSTD=1 HAVE_ZSTD_compressStream=1)
target_link_libraries(_libarchive PRIVATE ch_contrib::zstd)
endif()

@ -179,4 +179,4 @@ if (OS_LINUX)
)
endif()

add_library(ch_contrib::libarchive ALIAS _libarchive)
add_library(ch_contrib::libarchive ALIAS _libarchive)
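Note: the libarchive hunk renames the hard-coded zstd feature macro from HAVE_LIBZSTD_COMPRESSOR to HAVE_ZSTD_compressStream, which is what the updated libarchive sources test for. ClickHouse can hard-code the definition because zstd is always bundled; a hedged sketch of the symbol check such a macro would normally come from in a detection-based build:

    include(CheckSymbolExists)
    # Probe for the streaming compression entry point instead of assuming it.
    check_symbol_exists(ZSTD_compressStream "zstd.h" HAVE_ZSTD_compressStream)
    if (HAVE_ZSTD_compressStream)
        target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_compressStream=1)
    endif()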
@ -334,13 +334,16 @@ typedef uint64_t uintmax_t;
|
||||
/* #undef ARCHIVE_XATTR_LINUX */
|
||||
|
||||
/* Version number of bsdcpio */
|
||||
#define BSDCPIO_VERSION_STRING "3.7.0"
|
||||
#define BSDCPIO_VERSION_STRING "3.7.4"
|
||||
|
||||
/* Version number of bsdtar */
|
||||
#define BSDTAR_VERSION_STRING "3.7.0"
|
||||
#define BSDTAR_VERSION_STRING "3.7.4"
|
||||
|
||||
/* Version number of bsdcat */
|
||||
#define BSDCAT_VERSION_STRING "3.7.0"
|
||||
#define BSDCAT_VERSION_STRING "3.7.4"
|
||||
|
||||
/* Version number of bsdunzip */
|
||||
#define BSDUNZIP_VERSION_STRING "3.7.4"
|
||||
|
||||
/* Define to 1 if you have the `acl_create_entry' function. */
|
||||
/* #undef HAVE_ACL_CREATE_ENTRY */
|
||||
@ -642,8 +645,8 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the `getgrnam_r' function. */
|
||||
#define HAVE_GETGRNAM_R 1
|
||||
|
||||
/* Define to 1 if platform uses `optreset` to reset `getopt` */
|
||||
#define HAVE_GETOPT_OPTRESET 1
|
||||
/* Define to 1 if you have the `getline' function. */
|
||||
#define HAVE_GETLINE 1
|
||||
|
||||
/* Define to 1 if you have the `getpid' function. */
|
||||
#define HAVE_GETPID 1
|
||||
@ -750,6 +753,12 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the `pcreposix' library (-lpcreposix). */
|
||||
/* #undef HAVE_LIBPCREPOSIX */
|
||||
|
||||
/* Define to 1 if you have the `pcre2-8' library (-lpcre2-8). */
|
||||
/* #undef HAVE_LIBPCRE2 */
|
||||
|
||||
/* Define to 1 if you have the `pcreposix' library (-lpcre2posix). */
|
||||
/* #undef HAVE_LIBPCRE2POSIX */
|
||||
|
||||
/* Define to 1 if you have the `xml2' library (-lxml2). */
|
||||
#define HAVE_LIBXML2 1
|
||||
|
||||
@ -765,9 +774,8 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the `zstd' library (-lzstd). */
|
||||
/* #undef HAVE_LIBZSTD */
|
||||
|
||||
/* Define to 1 if you have the `zstd' library (-lzstd) with compression
|
||||
support. */
|
||||
/* #undef HAVE_LIBZSTD_COMPRESSOR */
|
||||
/* Define to 1 if you have the ZSTD_compressStream function. */
|
||||
/* #undef HAVE_ZSTD_compressStream */
|
||||
|
||||
/* Define to 1 if you have the <limits.h> header file. */
|
||||
#define HAVE_LIMITS_H 1
|
||||
@ -923,6 +931,9 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the <pcreposix.h> header file. */
|
||||
/* #undef HAVE_PCREPOSIX_H */
|
||||
|
||||
/* Define to 1 if you have the <pcre2posix.h> header file. */
|
||||
/* #undef HAVE_PCRE2POSIX_H */
|
||||
|
||||
/* Define to 1 if you have the `pipe' function. */
|
||||
#define HAVE_PIPE 1
|
||||
|
||||
@ -1029,6 +1040,12 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the `strrchr' function. */
|
||||
#define HAVE_STRRCHR 1
|
||||
|
||||
/* Define to 1 if the system has the type `struct statfs'. */
|
||||
/* #undef HAVE_STRUCT_STATFS */
|
||||
|
||||
/* Define to 1 if `f_iosize' is a member of `struct statfs'. */
|
||||
/* #undef HAVE_STRUCT_STATFS_F_IOSIZE */
|
||||
|
||||
/* Define to 1 if `f_namemax' is a member of `struct statfs'. */
|
||||
/* #undef HAVE_STRUCT_STATFS_F_NAMEMAX */
|
||||
|
||||
@ -1077,6 +1094,9 @@ typedef uint64_t uintmax_t;
|
||||
/* Define to 1 if you have the `symlink' function. */
|
||||
#define HAVE_SYMLINK 1
|
||||
|
||||
/* Define to 1 if you have the `sysconf' function. */
|
||||
#define HAVE_SYSCONF 1
|
||||
|
||||
/* Define to 1 if you have the <sys/acl.h> header file. */
|
||||
/* #undef HAVE_SYS_ACL_H */
|
||||
|
||||
@ -1273,13 +1293,13 @@ typedef uint64_t uintmax_t;
|
||||
/* #undef HAVE__MKGMTIME */
|
||||
|
||||
/* Define as const if the declaration of iconv() needs const. */
|
||||
#define ICONV_CONST
|
||||
#define ICONV_CONST
|
||||
|
||||
/* Version number of libarchive as a single integer */
|
||||
#define LIBARCHIVE_VERSION_NUMBER "3007000"
|
||||
#define LIBARCHIVE_VERSION_NUMBER "3007004"
|
||||
|
||||
/* Version number of libarchive */
|
||||
#define LIBARCHIVE_VERSION_STRING "3.7.0"
|
||||
#define LIBARCHIVE_VERSION_STRING "3.7.4"
|
||||
|
||||
/* Define to 1 if `lstat' dereferences a symlink specified with a trailing
|
||||
slash. */
|
||||
@ -1333,7 +1353,7 @@ typedef uint64_t uintmax_t;
|
||||
#endif /* SAFE_TO_DEFINE_EXTENSIONS */
|
||||
|
||||
/* Version number of package */
|
||||
#define VERSION "3.7.0"
|
||||
#define VERSION "3.7.4"
|
||||
|
||||
/* Number of bits in a file offset, on hosts where this is settable. */
|
||||
/* #undef _FILE_OFFSET_BITS */
|
||||
|
2
contrib/libfiu
vendored
2
contrib/libfiu
vendored
@ -1 +1 @@
Subproject commit b85edbde4cf974b1b40d27828a56f0505f4e2ee5
Subproject commit a1290d8cd3d7b4541d6c976e0a54f572ac03f2a3
@ -1,20 +1,21 @@
if (NOT ENABLE_FIU)
message (STATUS "Not using fiu")
if (NOT ENABLE_LIBFIU)
message (STATUS "Not using libfiu")
return ()
endif ()

set(FIU_DIR "${ClickHouse_SOURCE_DIR}/contrib/libfiu/")
set(LIBFIU_DIR "${ClickHouse_SOURCE_DIR}/contrib/libfiu/")

set(FIU_SOURCES
${FIU_DIR}/libfiu/fiu.c
${FIU_DIR}/libfiu/fiu-rc.c
${FIU_DIR}/libfiu/backtrace.c
${FIU_DIR}/libfiu/wtable.c
set(LIBFIU_SOURCES
${LIBFIU_DIR}/libfiu/fiu.c
${LIBFIU_DIR}/libfiu/fiu-rc.c
${LIBFIU_DIR}/libfiu/backtrace.c
${LIBFIU_DIR}/libfiu/wtable.c
)

set(FIU_HEADERS "${FIU_DIR}/libfiu")
set(LIBFIU_HEADERS "${LIBFIU_DIR}/libfiu")

add_library(_fiu ${FIU_SOURCES})
target_compile_definitions(_fiu PUBLIC DUMMY_BACKTRACE)
target_include_directories(_fiu PUBLIC ${FIU_HEADERS})
add_library(ch_contrib::fiu ALIAS _fiu)
add_library(_libfiu ${LIBFIU_SOURCES})
target_compile_definitions(_libfiu PUBLIC DUMMY_BACKTRACE)
target_compile_definitions(_libfiu PUBLIC FIU_ENABLE)
target_include_directories(_libfiu PUBLIC ${LIBFIU_HEADERS})
add_library(ch_contrib::libfiu ALIAS _libfiu)
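Note: besides renaming the targets, the libfiu hunk above adds FIU_ENABLE as a PUBLIC compile definition; without FIU_ENABLE, libfiu's failure-point macros such as fiu_do_on() compile to no-ops. A sketch of why PUBLIC matters here; the demo target and source file are hypothetical:

    # PUBLIC definitions propagate to every target that links the library, so
    # dependents also compile their fiu_do_on() failure points in; a PRIVATE
    # definition would leave them as no-ops in dependents.
    add_library(_libfiu ${LIBFIU_SOURCES})
    target_compile_definitions(_libfiu PUBLIC FIU_ENABLE)
    add_executable(fault_injection_demo demo.c)                        # hypothetical
    target_link_libraries(fault_injection_demo PRIVATE ch_contrib::libfiu)  # inherits FIU_ENABLE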
1
contrib/libpq
vendored
1
contrib/libpq
vendored
@ -1 +0,0 @@
|
||||
Subproject commit 2446f2c85650b56df9d4ebc4c2ea7f4b01beee57
|
@ -1,78 +0,0 @@
|
||||
if (NOT ENABLE_LIBPQXX)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(LIBPQ_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpq")
|
||||
|
||||
set(SRCS
|
||||
"${LIBPQ_SOURCE_DIR}/fe-auth.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-auth-scram.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-connect.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-exec.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-lobj.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-misc.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-print.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-trace.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-protocol3.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure-common.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure-openssl.c"
|
||||
"${LIBPQ_SOURCE_DIR}/legacy-pqsignal.c"
|
||||
"${LIBPQ_SOURCE_DIR}/libpq-events.c"
|
||||
"${LIBPQ_SOURCE_DIR}/pqexpbuffer.c"
|
||||
|
||||
"${LIBPQ_SOURCE_DIR}/common/scram-common.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/sha2.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/sha1.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/md5.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/md5_common.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/hmac_openssl.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/cryptohash.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/saslprep.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/unicode_norm.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/ip.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/jsonapi.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/wchar.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/base64.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/link-canary.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/fe_memutils.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/string.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/pg_get_line.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/stringinfo.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/psprintf.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/encnames.c"
|
||||
"${LIBPQ_SOURCE_DIR}/common/logging.c"
|
||||
|
||||
"${LIBPQ_SOURCE_DIR}/port/snprintf.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/strlcpy.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/strerror.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/inet_net_ntop.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/getpeereid.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/chklocale.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/noblock.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/pg_strong_random.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/pgstrcasecmp.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/thread.c"
|
||||
"${LIBPQ_SOURCE_DIR}/port/path.c"
|
||||
)
|
||||
|
||||
add_library(_libpq ${SRCS})
|
||||
|
||||
add_definitions(-DHAVE_BIO_METH_NEW)
|
||||
add_definitions(-DHAVE_HMAC_CTX_NEW)
|
||||
add_definitions(-DHAVE_HMAC_CTX_FREE)
|
||||
|
||||
target_include_directories (_libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR})
|
||||
target_include_directories (_libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include")
|
||||
target_include_directories (_libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs")
|
||||
|
||||
# NOTE: this is a dirty hack to avoid and instead pg_config.h should be shipped
|
||||
# for different OS'es like for jemalloc, not one generic for all OS'es like
|
||||
# now.
|
||||
if (OS_DARWIN OR OS_FREEBSD OR USE_MUSL)
|
||||
target_compile_definitions(_libpq PRIVATE -DSTRERROR_R_INT=1)
|
||||
endif()
|
||||
|
||||
target_link_libraries (_libpq PRIVATE OpenSSL::SSL)
|
||||
|
||||
add_library(ch_contrib::libpq ALIAS _libpq)
|
2
contrib/libpqxx
vendored
@ -1 +1 @@
Subproject commit c995193a3a14d71f4711f1f421f65a1a1db64640
Subproject commit 41e4c331564167cca97ad6eccbd5b8879c2ca044
@ -1,16 +1,9 @@
option(ENABLE_LIBPQXX "Enalbe libpqxx" ${ENABLE_LIBRARIES})

if (NOT ENABLE_LIBPQXX)
    message(STATUS "Not using libpqxx")
    return()
endif()

set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpqxx")

set (SRCS
    "${LIBRARY_DIR}/src/strconv.cxx"
    "${LIBRARY_DIR}/src/array.cxx"
    "${LIBRARY_DIR}/src/binarystring.cxx"
    "${LIBRARY_DIR}/src/blob.cxx"
    "${LIBRARY_DIR}/src/connection.cxx"
    "${LIBRARY_DIR}/src/cursor.cxx"
    "${LIBRARY_DIR}/src/encodings.cxx"
@ -19,59 +12,25 @@ set (SRCS
    "${LIBRARY_DIR}/src/field.cxx"
    "${LIBRARY_DIR}/src/largeobject.cxx"
    "${LIBRARY_DIR}/src/notification.cxx"
    "${LIBRARY_DIR}/src/params.cxx"
    "${LIBRARY_DIR}/src/pipeline.cxx"
    "${LIBRARY_DIR}/src/result.cxx"
    "${LIBRARY_DIR}/src/robusttransaction.cxx"
    "${LIBRARY_DIR}/src/row.cxx"
    "${LIBRARY_DIR}/src/sql_cursor.cxx"
    "${LIBRARY_DIR}/src/strconv.cxx"
    "${LIBRARY_DIR}/src/stream_from.cxx"
    "${LIBRARY_DIR}/src/stream_to.cxx"
    "${LIBRARY_DIR}/src/subtransaction.cxx"
    "${LIBRARY_DIR}/src/time.cxx"
    "${LIBRARY_DIR}/src/transaction.cxx"
    "${LIBRARY_DIR}/src/transaction_base.cxx"
    "${LIBRARY_DIR}/src/row.cxx"
    "${LIBRARY_DIR}/src/params.cxx"
    "${LIBRARY_DIR}/src/util.cxx"
    "${LIBRARY_DIR}/src/version.cxx"
    "${LIBRARY_DIR}/src/wait.cxx"
)

# Need to explicitly include each header file, because in the directory include/pqxx there are also files
# like just 'array'. So if including the whole directory with `target_include_directories`, it will make
# conflicts with all includes of <array>.
set (HDRS
    "${LIBRARY_DIR}/include/pqxx/array.hxx"
    "${LIBRARY_DIR}/include/pqxx/params.hxx"
    "${LIBRARY_DIR}/include/pqxx/binarystring.hxx"
    "${LIBRARY_DIR}/include/pqxx/composite.hxx"
    "${LIBRARY_DIR}/include/pqxx/connection.hxx"
    "${LIBRARY_DIR}/include/pqxx/cursor.hxx"
    "${LIBRARY_DIR}/include/pqxx/dbtransaction.hxx"
    "${LIBRARY_DIR}/include/pqxx/errorhandler.hxx"
    "${LIBRARY_DIR}/include/pqxx/except.hxx"
    "${LIBRARY_DIR}/include/pqxx/field.hxx"
    "${LIBRARY_DIR}/include/pqxx/isolation.hxx"
    "${LIBRARY_DIR}/include/pqxx/largeobject.hxx"
    "${LIBRARY_DIR}/include/pqxx/nontransaction.hxx"
    "${LIBRARY_DIR}/include/pqxx/notification.hxx"
    "${LIBRARY_DIR}/include/pqxx/pipeline.hxx"
    "${LIBRARY_DIR}/include/pqxx/prepared_statement.hxx"
    "${LIBRARY_DIR}/include/pqxx/result.hxx"
    "${LIBRARY_DIR}/include/pqxx/robusttransaction.hxx"
    "${LIBRARY_DIR}/include/pqxx/row.hxx"
    "${LIBRARY_DIR}/include/pqxx/separated_list.hxx"
    "${LIBRARY_DIR}/include/pqxx/strconv.hxx"
    "${LIBRARY_DIR}/include/pqxx/stream_from.hxx"
    "${LIBRARY_DIR}/include/pqxx/stream_to.hxx"
    "${LIBRARY_DIR}/include/pqxx/subtransaction.hxx"
    "${LIBRARY_DIR}/include/pqxx/transaction.hxx"
    "${LIBRARY_DIR}/include/pqxx/transaction_base.hxx"
    "${LIBRARY_DIR}/include/pqxx/types.hxx"
    "${LIBRARY_DIR}/include/pqxx/util.hxx"
    "${LIBRARY_DIR}/include/pqxx/version.hxx"
    "${LIBRARY_DIR}/include/pqxx/zview.hxx"
)

add_library(_libpqxx ${SRCS} ${HDRS})

add_library(_libpqxx ${SRCS})
target_link_libraries(_libpqxx PUBLIC ch_contrib::libpq)
target_include_directories (_libpqxx SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include")
2
contrib/libprotobuf-mutator
vendored
@ -1 +1 @@
Subproject commit 1f95f8083066f5b38fd2db172e7e7f9aa7c49d2d
Subproject commit b922c8ab9004ef9944982e4f165e2747b13223fa
2
contrib/librdkafka
vendored
@ -1 +1 @@
Subproject commit 2d2aab6f5b79db1cfca15d7bf0dee75d00d82082
Subproject commit 39d4ed49ccf3406e2bf825d5d7b0903b5a290782
2
contrib/libunwind
vendored
@ -1 +1 @@
Subproject commit a89d904befea07814628c6ce0b44083c4e149c62
Subproject commit 601db0b0e03018c01710470a37703b618f9cf08b
2
contrib/libuv
vendored
@ -1 +1 @@
Subproject commit 4482964660c77eec1166cd7d14fb915e3dbd774a
Subproject commit 714b58b9849568211ade86b44dd91d37f8a2175e
@ -10,6 +10,7 @@ set(uv_sources
    src/random.c
    src/strscpy.c
    src/strtok.c
    src/thread-common.c
    src/threadpool.c
    src/timer.c
    src/uv-common.c
@ -70,10 +71,7 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
    list(APPEND uv_defines _GNU_SOURCE _POSIX_C_SOURCE=200112)
    list(APPEND uv_libraries rt)
    list(APPEND uv_sources
        src/unix/epoll.c
        src/unix/linux-core.c
        src/unix/linux-inotify.c
        src/unix/linux-syscalls.c
        src/unix/linux.c
        src/unix/procfs-exepath.c
        src/unix/random-getrandom.c
        src/unix/random-sysctl-linux.c)
|
@ -140,6 +140,12 @@ if (CMAKE_CROSSCOMPILING)
|
||||
message (STATUS "CROSS COMPILING SET LLVM HOST TRIPLE ${LLVM_HOST_TRIPLE}")
|
||||
endif()
|
||||
|
||||
# llvm-project/llvm/cmake/config-ix.cmake does a weird thing: it defines _LARGEFILE64_SOURCE,
|
||||
# then checks if lseek64() function exists, then undefines _LARGEFILE64_SOURCE.
|
||||
# Then the actual code that uses this function *doesn't* define _LARGEFILE64_SOURCE, so lseek64()
|
||||
# may not exist and compilation fails. This happens with musl.
|
||||
add_compile_definitions("_LARGEFILE64_SOURCE")
|
||||
|
||||
add_subdirectory ("${LLVM_SOURCE_DIR}" "${LLVM_BINARY_DIR}")
|
||||
|
||||
set_directory_properties (PROPERTIES
|
||||
|
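The comment above describes the failure mode that `add_compile_definitions("_LARGEFILE64_SOURCE")` works around. A hedged, stand-alone C illustration (not taken from LLVM itself, assuming a musl-like libc where `lseek64()` is only declared under `_LARGEFILE64_SOURCE`):

```c
/* Assumed musl-like libc: lseek64() is visible only with _LARGEFILE64_SOURCE. */
#define _LARGEFILE64_SOURCE  /* what the CMake change guarantees for every translation unit */
#include <unistd.h>

long long skip_header(int fd)
{
    /* Without the macro above, this call may fail to compile on musl,
     * even though the configure-time probe "found" the symbol. */
    return (long long) lseek64(fd, 4096, SEEK_SET);
}
```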
1
contrib/mongo-c-driver
vendored
Submodule
@ -0,0 +1 @@
Subproject commit d55410c69183c90d18fd3b3f1d9db3d224fc8d52
150
contrib/mongo-c-driver-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,150 @@
|
||||
option(USE_MONGODB "Enable MongoDB support" ${ENABLE_LIBRARIES})
|
||||
if(NOT USE_MONGODB)
|
||||
message(STATUS "Not using libmongoc and libbson")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(libbson_VERSION_MAJOR 1)
|
||||
set(libbson_VERSION_MINOR 27)
|
||||
set(libbson_VERSION_PATCH 0)
|
||||
set(libbson_VERSION 1.27.0)
|
||||
set(libmongoc_VERSION_MAJOR 1)
|
||||
set(libmongoc_VERSION_MINOR 27)
|
||||
set(libmongoc_VERSION_PATCH 0)
|
||||
set(libmongoc_VERSION 1.27.0)
|
||||
|
||||
set(LIBBSON_SOURCES_ROOT "${ClickHouse_SOURCE_DIR}/contrib/mongo-c-driver/src")
|
||||
set(LIBBSON_SOURCE_DIR "${LIBBSON_SOURCES_ROOT}/libbson/src")
|
||||
file(GLOB_RECURSE LIBBSON_SOURCES "${LIBBSON_SOURCE_DIR}/*.c")
|
||||
|
||||
set(LIBBSON_BINARY_ROOT "${ClickHouse_BINARY_DIR}/contrib/mongo-c-driver/src")
|
||||
set(LIBBSON_BINARY_DIR "${LIBBSON_BINARY_ROOT}/libbson/src")
|
||||
|
||||
include(TestBigEndian)
|
||||
test_big_endian(BSON_BIG_ENDIAN)
|
||||
if(BSON_BIG_ENDIAN)
|
||||
set(BSON_BYTE_ORDER 4321)
|
||||
else()
|
||||
set(BSON_BYTE_ORDER 1234)
|
||||
endif()
|
||||
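For context on the `test_big_endian()` block above: `BSON_BYTE_ORDER` encodes endianness as 1234 (little endian) or 4321 (big endian). A small hedged C probe showing the same distinction the CMake check makes at configure time:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const uint16_t probe = 0x0102;
    const uint8_t *bytes = (const uint8_t *) &probe;

    /* Big-endian machines store the most significant byte first. */
    printf("BSON_BYTE_ORDER would be %d\n", bytes[0] == 0x01 ? 4321 : 1234);
    return 0;
}
```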
|
||||
set(BSON_OS 1)
|
||||
set(BSON_EXTRA_ALIGN 1)
|
||||
set(BSON_HAVE_SNPRINTF 1)
|
||||
set(BSON_HAVE_TIMESPEC 1)
|
||||
set(BSON_HAVE_GMTIME_R 1)
|
||||
set(BSON_HAVE_RAND_R 1)
|
||||
set(BSON_HAVE_STRINGS_H 1)
|
||||
set(BSON_HAVE_STRLCPY 0)
|
||||
set(BSON_HAVE_STRNLEN 1)
|
||||
set(BSON_HAVE_STDBOOL_H 1)
|
||||
set(BSON_HAVE_CLOCK_GETTIME 1)
|
||||
|
||||
|
||||
# common settings
|
||||
set(MONGOC_TRACE 0)
|
||||
set(MONGOC_ENABLE_STATIC_BUILD 1)
|
||||
set(MONGOC_ENABLE_DEBUG_ASSERTIONS 0)
|
||||
set(MONGOC_ENABLE_MONGODB_AWS_AUTH 0)
|
||||
set(MONGOC_ENABLE_SASL_CYRUS 0)
|
||||
set(MONGOC_ENABLE_SASL 0)
|
||||
set(MONGOC_ENABLE_SASL_SSPI 0)
|
||||
set(MONGOC_HAVE_SASL_CLIENT_DONE 0)
|
||||
set(MONGOC_ENABLE_SRV 0)
|
||||
|
||||
# DNS
|
||||
set(MONGOC_HAVE_DNSAPI 0)
|
||||
set(MONGOC_HAVE_RES_SEARCH 0)
|
||||
set(MONGOC_HAVE_RES_NSEARCH 0)
|
||||
set(MONGOC_HAVE_RES_NCLOSE 0)
|
||||
set(MONGOC_HAVE_RES_NDESTROY 0)
|
||||
|
||||
set(MONGOC_ENABLE_COMPRESSION 1)
|
||||
set(MONGOC_ENABLE_COMPRESSION_ZLIB 0)
|
||||
set(MONGOC_ENABLE_COMPRESSION_SNAPPY 0)
|
||||
set(MONGOC_ENABLE_COMPRESSION_ZSTD 1)
|
||||
|
||||
# SSL
|
||||
set(MONGOC_ENABLE_CRYPTO 0)
|
||||
set(MONGOC_ENABLE_CRYPTO_CNG 0)
|
||||
set(MONGOC_ENABLE_CRYPTO_COMMON_CRYPTO 0)
|
||||
set(MONGOC_ENABLE_CRYPTO_SYSTEM_PROFILE 0)
|
||||
set(MONGOC_ENABLE_SSL 0)
|
||||
set(MONGOC_ENABLE_SSL_OPENSSL 0)
|
||||
set(MONGOC_ENABLE_SSL_SECURE_CHANNEL 0)
|
||||
set(MONGOC_ENABLE_SSL_SECURE_TRANSPORT 0)
|
||||
set(MONGOC_ENABLE_SSL_LIBRESSL 0)
|
||||
set(MONGOC_ENABLE_CRYPTO_LIBCRYPTO 0)
|
||||
set(MONGOC_ENABLE_CLIENT_SIDE_ENCRYPTION 0)
|
||||
set(MONGOC_HAVE_ASN1_STRING_GET0_DATA 0)
|
||||
if(ENABLE_SSL)
|
||||
set(MONGOC_ENABLE_SSL 1)
|
||||
set(MONGOC_ENABLE_CRYPTO 1)
|
||||
set(MONGOC_ENABLE_SSL_OPENSSL 1)
|
||||
set(MONGOC_ENABLE_CRYPTO_LIBCRYPTO 1)
|
||||
set(MONGOC_HAVE_ASN1_STRING_GET0_DATA 1)
|
||||
else()
|
||||
message(WARNING "Building mongoc without SSL")
|
||||
endif()
|
||||
|
||||
set(CMAKE_EXTRA_INCLUDE_FILES "sys/socket.h")
|
||||
set(MONGOC_SOCKET_ARG2 "struct sockaddr")
|
||||
set(MONGOC_HAVE_SOCKLEN 1)
|
||||
set(MONGOC_SOCKET_ARG3 "socklen_t")
|
||||
|
||||
set(MONGOC_ENABLE_RDTSCP 0)
|
||||
set(MONGOC_NO_AUTOMATIC_GLOBALS 1)
|
||||
set(MONGOC_ENABLE_STATIC_INSTALL 0)
|
||||
set(MONGOC_ENABLE_SHM_COUNTERS 0)
|
||||
set(MONGOC_HAVE_SCHED_GETCPU 0)
|
||||
set(MONGOC_HAVE_SS_FAMILY 0)
|
||||
|
||||
configure_file(
|
||||
${LIBBSON_SOURCE_DIR}/bson/bson-config.h.in
|
||||
${LIBBSON_BINARY_DIR}/bson/bson-config.h
|
||||
)
|
||||
configure_file(
|
||||
${LIBBSON_SOURCE_DIR}/bson/bson-version.h.in
|
||||
${LIBBSON_BINARY_DIR}/bson/bson-version.h
|
||||
)
|
||||
|
||||
set(COMMON_SOURCE_DIR "${LIBBSON_SOURCES_ROOT}/common")
|
||||
file(GLOB_RECURSE COMMON_SOURCES "${COMMON_SOURCE_DIR}/*.c")
|
||||
configure_file(
|
||||
${COMMON_SOURCE_DIR}/common-config.h.in
|
||||
${COMMON_SOURCE_DIR}/common-config.h
|
||||
)
|
||||
add_library(_libbson ${LIBBSON_SOURCES} ${COMMON_SOURCES})
|
||||
add_library(ch_contrib::libbson ALIAS _libbson)
|
||||
target_include_directories(_libbson SYSTEM PUBLIC ${LIBBSON_SOURCE_DIR} ${LIBBSON_BINARY_DIR} ${COMMON_SOURCE_DIR})
|
||||
target_compile_definitions(_libbson PRIVATE BSON_COMPILATION)
|
||||
if(OS_LINUX)
|
||||
target_compile_definitions(_libbson PRIVATE -D_GNU_SOURCE -D_POSIX_C_SOURCE=199309L -D_XOPEN_SOURCE=600)
|
||||
elseif(OS_DARWIN)
|
||||
target_compile_definitions(_libbson PRIVATE -D_DARWIN_C_SOURCE)
|
||||
endif()
|
||||
|
||||
|
||||
set(LIBMONGOC_SOURCE_DIR "${LIBBSON_SOURCES_ROOT}/libmongoc/src")
|
||||
file(GLOB_RECURSE LIBMONGOC_SOURCES "${LIBMONGOC_SOURCE_DIR}/*.c")
|
||||
set(UTF8PROC_SOURCE_DIR "${LIBBSON_SOURCES_ROOT}/utf8proc-2.8.0")
|
||||
set(UTF8PROC_SOURCES "${UTF8PROC_SOURCE_DIR}/utf8proc.c")
|
||||
set(UTHASH_SOURCE_DIR "${LIBBSON_SOURCES_ROOT}/uthash")
|
||||
|
||||
configure_file(
|
||||
${LIBMONGOC_SOURCE_DIR}/mongoc/mongoc-config.h.in
|
||||
${LIBMONGOC_SOURCE_DIR}/mongoc/mongoc-config.h
|
||||
)
|
||||
configure_file(
|
||||
${LIBMONGOC_SOURCE_DIR}/mongoc/mongoc-version.h.in
|
||||
${LIBMONGOC_SOURCE_DIR}/mongoc/mongoc-version.h
|
||||
)
|
||||
add_library(_libmongoc ${LIBMONGOC_SOURCES} ${COMMON_SOURCES} ${UTF8PROC_SOURCES})
|
||||
add_library(ch_contrib::libmongoc ALIAS _libmongoc)
|
||||
target_include_directories(_libmongoc SYSTEM PUBLIC ${LIBMONGOC_SOURCE_DIR} ${COMMON_SOURCE_DIR} ${UTF8PROC_SOURCE_DIR} ${UTHASH_SOURCE_DIR})
|
||||
target_include_directories(_libmongoc SYSTEM PRIVATE ${LIBMONGOC_SOURCE_DIR}/mongoc ${UTHASH_SOURCE_DIR})
|
||||
target_compile_definitions(_libmongoc PRIVATE MONGOC_COMPILATION)
|
||||
target_link_libraries(_libmongoc ch_contrib::libbson ch_contrib::c-ares ch_contrib::zstd)
|
||||
if(ENABLE_SSL)
|
||||
target_link_libraries(_libmongoc OpenSSL::SSL)
|
||||
endif()
|
1
contrib/mongo-cxx-driver
vendored
Submodule
@ -0,0 +1 @@
Subproject commit 3166bdb49b717ce1bc30f46cc2b274ab1de7005b
192
contrib/mongo-cxx-driver-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,192 @@
|
||||
option(USE_MONGODB "Enable MongoDB support" ${ENABLE_LIBRARIES})
|
||||
|
||||
if(NOT USE_MONGODB)
|
||||
message(STATUS "Not using mongocxx and bsoncxx")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(BSONCXX_SOURCES_DIR "${ClickHouse_SOURCE_DIR}/contrib/mongo-cxx-driver/src/bsoncxx")
|
||||
set(BSONCXX_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/mongo-cxx-driver/src/bsoncxx")
|
||||
|
||||
set(BSONCXX_SOURCES
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/array/element.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/array/value.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/array/view.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/builder/core.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/decimal128.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/document/element.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/document/value.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/document/view.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/exception/error_code.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/json.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/oid.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/private/itoa.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/string/view_or_value.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/types.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/types/bson_value/value.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/types/bson_value/view.cpp
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/validate.cpp
|
||||
)
|
||||
set(BSONCXX_POLY_USE_IMPLS ON)
|
||||
|
||||
configure_file(
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/config.hpp.in
|
||||
${BSONCXX_BINARY_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/config.hpp
|
||||
)
|
||||
configure_file(
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/version.hpp.in
|
||||
${BSONCXX_BINARY_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/version.hpp
|
||||
)
|
||||
configure_file(
|
||||
${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/private/config.hh.in
|
||||
${BSONCXX_BINARY_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/private/config.hh
|
||||
)
|
||||
|
||||
add_library(_bsoncxx ${BSONCXX_SOURCES})
|
||||
add_library(ch_contrib::bsoncxx ALIAS _bsoncxx)
|
||||
target_include_directories(_bsoncxx SYSTEM PUBLIC "${BSONCXX_SOURCES_DIR}/include/bsoncxx/v_noabi" "${BSONCXX_SOURCES_DIR}/lib/bsoncxx/v_noabi" "${BSONCXX_BINARY_DIR}/lib/bsoncxx/v_noabi")
|
||||
target_compile_definitions(_bsoncxx PUBLIC BSONCXX_STATIC)
|
||||
target_link_libraries(_bsoncxx ch_contrib::libbson)
|
||||
|
||||
include(GenerateExportHeader)
|
||||
generate_export_header(_bsoncxx
|
||||
BASE_NAME BSONCXX
|
||||
EXPORT_MACRO_NAME BSONCXX_API
|
||||
NO_EXPORT_MACRO_NAME BSONCXX_PRIVATE
|
||||
EXPORT_FILE_NAME ${BSONCXX_BINARY_DIR}/lib/bsoncxx/v_noabi/bsoncxx/config/export.hpp
|
||||
STATIC_DEFINE BSONCXX_STATIC
|
||||
)
|
||||
|
||||
|
||||
|
||||
set(MONGOCXX_SOURCES_DIR "${ClickHouse_SOURCE_DIR}/contrib/mongo-cxx-driver/src/mongocxx")
|
||||
set(MONGOCXX_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/mongo-cxx-driver/src/mongocxx")
|
||||
set(MONGOCXX_SOURCES
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/bulk_write.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/change_stream.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/client.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/client_encryption.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/client_session.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/collection.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/cursor.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/database.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/command_failed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/command_started_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/command_succeeded_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/heartbeat_failed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/heartbeat_started_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/heartbeat_succeeded_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/server_changed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/server_closed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/server_description.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/server_opening_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/topology_changed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/topology_closed_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/topology_description.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/events/topology_opening_event.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/exception/error_code.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/exception/operation_exception.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/exception/server_error_code.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/gridfs/bucket.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/gridfs/downloader.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/gridfs/uploader.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/hint.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/index_model.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/index_view.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/instance.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/logger.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/delete_many.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/delete_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/insert_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/replace_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/update_many.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/update_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/model/write.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/aggregate.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/apm.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/auto_encryption.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/bulk_write.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/change_stream.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/client.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/client_encryption.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/client_session.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/count.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/create_collection.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/data_key.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/delete.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/distinct.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/encrypt.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/estimated_document_count.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/find.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/find_one_and_delete.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/find_one_and_replace.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/find_one_and_update.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/gridfs/bucket.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/gridfs/upload.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/index.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/index_view.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/insert.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/pool.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/range.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/replace.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/rewrap_many_datakey.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/server_api.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/tls.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/transaction.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/options/update.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/pipeline.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/pool.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/private/conversions.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/private/libbson.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/private/libmongoc.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/private/numeric_casting.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/read_concern.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/read_preference.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/bulk_write.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/delete.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/gridfs/upload.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/insert_many.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/insert_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/replace_one.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/rewrap_many_datakey.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/result/update.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/search_index_model.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/search_index_view.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/uri.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/validation_criteria.cpp
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/write_concern.cpp
|
||||
)
|
||||
set(MONGOCXX_COMPILER_VERSION "${CMAKE_CXX_COMPILER_VERSION}")
|
||||
set(MONGOCXX_COMPILER_ID "${CMAKE_CXX_COMPILER_ID}")
|
||||
set(MONGOCXX_LINK_WITH_STATIC_MONGOC 1)
|
||||
set(MONGOCXX_BUILD_STATIC 1)
|
||||
if(ENABLE_SSL)
|
||||
set(MONGOCXX_ENABLE_SSL 1)
|
||||
endif()
|
||||
|
||||
configure_file(
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/config/config.hpp.in
|
||||
${MONGOCXX_BINARY_DIR}/lib/mongocxx/v_noabi/mongocxx/config/config.hpp
|
||||
)
|
||||
configure_file(
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/config/version.hpp.in
|
||||
${MONGOCXX_BINARY_DIR}/lib/mongocxx/v_noabi/mongocxx/config/version.hpp
|
||||
)
|
||||
configure_file(
|
||||
${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi/mongocxx/config/private/config.hh.in
|
||||
${MONGOCXX_BINARY_DIR}/lib/mongocxx/v_noabi/mongocxx/config/private/config.hh
|
||||
)
|
||||
|
||||
add_library(_mongocxx ${MONGOCXX_SOURCES})
|
||||
add_library(ch_contrib::mongocxx ALIAS _mongocxx)
|
||||
target_include_directories(_mongocxx SYSTEM PUBLIC "${MONGOCXX_SOURCES_DIR}/include/mongocxx/v_noabi" "${MONGOCXX_SOURCES_DIR}/lib/mongocxx/v_noabi" "${MONGOCXX_BINARY_DIR}/lib/mongocxx/v_noabi")
|
||||
target_compile_definitions(_mongocxx PUBLIC MONGOCXX_STATIC)
|
||||
target_link_libraries(_mongocxx ch_contrib::bsoncxx ch_contrib::libmongoc)
|
||||
|
||||
generate_export_header(_mongocxx
|
||||
BASE_NAME MONGOCXX
|
||||
EXPORT_MACRO_NAME MONGOCXX_API
|
||||
NO_EXPORT_MACRO_NAME MONGOCXX_PRIVATE
|
||||
EXPORT_FILE_NAME ${MONGOCXX_BINARY_DIR}/lib/mongocxx/v_noabi/mongocxx/config/export.hpp
|
||||
STATIC_DEFINE MONGOCXX_STATIC
|
||||
)
|
2
contrib/openssl
vendored
@ -1 +1 @@
Subproject commit 66deddc1e53cda8706604a019777259372d1bd62
Subproject commit b3e62c440f390e12e77c80675f883af82ad3d5ed
1
contrib/postgres
vendored
Submodule
@ -0,0 +1 @@
Subproject commit c041ed8cbda02eb881de8d7645ca96b6e4b2327d
81
contrib/postgres-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,81 @@
|
||||
# Build description for libpq which is part of the PostgreSQL sources
|
||||
|
||||
set(POSTGRES_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/postgres")
|
||||
set(LIBPQ_SOURCE_DIR "${POSTGRES_SOURCE_DIR}/src/interfaces/libpq")
|
||||
set(LIBPQ_CMAKE_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/postgres-cmake")
|
||||
|
||||
set(SRCS
|
||||
"${LIBPQ_SOURCE_DIR}/fe-auth.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-auth-scram.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-connect.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-exec.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-lobj.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-misc.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-print.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-trace.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-protocol3.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure-common.c"
|
||||
"${LIBPQ_SOURCE_DIR}/fe-secure-openssl.c"
|
||||
"${LIBPQ_SOURCE_DIR}/legacy-pqsignal.c"
|
||||
"${LIBPQ_SOURCE_DIR}/libpq-events.c"
|
||||
"${LIBPQ_SOURCE_DIR}/pqexpbuffer.c"
|
||||
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/scram-common.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/sha2.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/sha1.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/md5.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/md5_common.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/hmac_openssl.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/cryptohash.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/saslprep.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/unicode_norm.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/ip.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/jsonapi.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/wchar.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/base64.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/link-canary.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/fe_memutils.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/string.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/pg_get_line.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/pg_prng.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/stringinfo.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/psprintf.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/encnames.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/common/logging.c"
|
||||
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/snprintf.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/strlcat.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/strlcpy.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/strerror.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/inet_net_ntop.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/getpeereid.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/chklocale.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/noblock.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/pg_strong_random.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/pgstrcasecmp.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/pg_bitutils.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/thread.c"
|
||||
"${POSTGRES_SOURCE_DIR}/src/port/path.c"
|
||||
)
|
||||
|
||||
add_library(_libpq ${SRCS})
|
||||
|
||||
add_definitions(-DHAVE_BIO_METH_NEW)
|
||||
add_definitions(-DHAVE_HMAC_CTX_NEW)
|
||||
add_definitions(-DHAVE_HMAC_CTX_FREE)
|
||||
|
||||
target_include_directories (_libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR})
|
||||
target_include_directories (_libpq SYSTEM PUBLIC "${POSTGRES_SOURCE_DIR}/src/include")
|
||||
target_include_directories (_libpq SYSTEM PUBLIC "${LIBPQ_CMAKE_SOURCE_DIR}") # pre-generated headers
|
||||
|
||||
# NOTE: this is a dirty hack to avoid and instead pg_config.h should be shipped
|
||||
# for different OS'es like for jemalloc, not one generic for all OS'es like
|
||||
# now.
|
||||
if (OS_DARWIN OR OS_FREEBSD OR USE_MUSL)
|
||||
target_compile_definitions(_libpq PRIVATE -DSTRERROR_R_INT=1)
|
||||
endif()
|
||||
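The `STRERROR_R_INT` definition above exists because strerror_r() has two incompatible variants: the POSIX one (Darwin, FreeBSD, musl) returns int, while glibc's default returns char *. A hedged sketch of the kind of caller-side switch this macro enables (the helper name is illustrative, not from the PostgreSQL sources):

```c
#include <stdio.h>
#include <string.h>

static void print_errno(int err)
{
    char buf[256];
#ifdef STRERROR_R_INT
    /* POSIX variant: returns 0 on success, an error number otherwise. */
    if (strerror_r(err, buf, sizeof(buf)) != 0)
        snprintf(buf, sizeof(buf), "unknown error %d", err);
    fprintf(stderr, "%s\n", buf);
#else
    /* GNU variant: returns a pointer that may or may not be buf. */
    fprintf(stderr, "%s\n", strerror_r(err, buf, sizeof(buf)));
#endif
}
```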
|
||||
target_link_libraries (_libpq PRIVATE OpenSSL::SSL)
|
||||
|
||||
add_library(ch_contrib::libpq ALIAS _libpq)
|
471
contrib/postgres-cmake/nodes/nodetags.h
Normal file
@ -0,0 +1,471 @@
|
||||
/*-------------------------------------------------------------------------
|
||||
*
|
||||
* nodetags.h
|
||||
* Generated node infrastructure code
|
||||
*
|
||||
* Portions Copyright (c) 1996-2023, PostgreSQL Global Development Group
|
||||
* Portions Copyright (c) 1994, Regents of the University of California
|
||||
*
|
||||
* NOTES
|
||||
* ******************************
|
||||
* *** DO NOT EDIT THIS FILE! ***
|
||||
* ******************************
|
||||
*
|
||||
* It has been GENERATED by src/backend/nodes/gen_node_support.pl
|
||||
*
|
||||
*-------------------------------------------------------------------------
|
||||
*/
|
||||
T_List = 1,
|
||||
T_Alias = 2,
|
||||
T_RangeVar = 3,
|
||||
T_TableFunc = 4,
|
||||
T_IntoClause = 5,
|
||||
T_Var = 6,
|
||||
T_Const = 7,
|
||||
T_Param = 8,
|
||||
T_Aggref = 9,
|
||||
T_GroupingFunc = 10,
|
||||
T_WindowFunc = 11,
|
||||
T_SubscriptingRef = 12,
|
||||
T_FuncExpr = 13,
|
||||
T_NamedArgExpr = 14,
|
||||
T_OpExpr = 15,
|
||||
T_DistinctExpr = 16,
|
||||
T_NullIfExpr = 17,
|
||||
T_ScalarArrayOpExpr = 18,
|
||||
T_BoolExpr = 19,
|
||||
T_SubLink = 20,
|
||||
T_SubPlan = 21,
|
||||
T_AlternativeSubPlan = 22,
|
||||
T_FieldSelect = 23,
|
||||
T_FieldStore = 24,
|
||||
T_RelabelType = 25,
|
||||
T_CoerceViaIO = 26,
|
||||
T_ArrayCoerceExpr = 27,
|
||||
T_ConvertRowtypeExpr = 28,
|
||||
T_CollateExpr = 29,
|
||||
T_CaseExpr = 30,
|
||||
T_CaseWhen = 31,
|
||||
T_CaseTestExpr = 32,
|
||||
T_ArrayExpr = 33,
|
||||
T_RowExpr = 34,
|
||||
T_RowCompareExpr = 35,
|
||||
T_CoalesceExpr = 36,
|
||||
T_MinMaxExpr = 37,
|
||||
T_SQLValueFunction = 38,
|
||||
T_XmlExpr = 39,
|
||||
T_JsonFormat = 40,
|
||||
T_JsonReturning = 41,
|
||||
T_JsonValueExpr = 42,
|
||||
T_JsonConstructorExpr = 43,
|
||||
T_JsonIsPredicate = 44,
|
||||
T_NullTest = 45,
|
||||
T_BooleanTest = 46,
|
||||
T_CoerceToDomain = 47,
|
||||
T_CoerceToDomainValue = 48,
|
||||
T_SetToDefault = 49,
|
||||
T_CurrentOfExpr = 50,
|
||||
T_NextValueExpr = 51,
|
||||
T_InferenceElem = 52,
|
||||
T_TargetEntry = 53,
|
||||
T_RangeTblRef = 54,
|
||||
T_JoinExpr = 55,
|
||||
T_FromExpr = 56,
|
||||
T_OnConflictExpr = 57,
|
||||
T_Query = 58,
|
||||
T_TypeName = 59,
|
||||
T_ColumnRef = 60,
|
||||
T_ParamRef = 61,
|
||||
T_A_Expr = 62,
|
||||
T_A_Const = 63,
|
||||
T_TypeCast = 64,
|
||||
T_CollateClause = 65,
|
||||
T_RoleSpec = 66,
|
||||
T_FuncCall = 67,
|
||||
T_A_Star = 68,
|
||||
T_A_Indices = 69,
|
||||
T_A_Indirection = 70,
|
||||
T_A_ArrayExpr = 71,
|
||||
T_ResTarget = 72,
|
||||
T_MultiAssignRef = 73,
|
||||
T_SortBy = 74,
|
||||
T_WindowDef = 75,
|
||||
T_RangeSubselect = 76,
|
||||
T_RangeFunction = 77,
|
||||
T_RangeTableFunc = 78,
|
||||
T_RangeTableFuncCol = 79,
|
||||
T_RangeTableSample = 80,
|
||||
T_ColumnDef = 81,
|
||||
T_TableLikeClause = 82,
|
||||
T_IndexElem = 83,
|
||||
T_DefElem = 84,
|
||||
T_LockingClause = 85,
|
||||
T_XmlSerialize = 86,
|
||||
T_PartitionElem = 87,
|
||||
T_PartitionSpec = 88,
|
||||
T_PartitionBoundSpec = 89,
|
||||
T_PartitionRangeDatum = 90,
|
||||
T_PartitionCmd = 91,
|
||||
T_RangeTblEntry = 92,
|
||||
T_RTEPermissionInfo = 93,
|
||||
T_RangeTblFunction = 94,
|
||||
T_TableSampleClause = 95,
|
||||
T_WithCheckOption = 96,
|
||||
T_SortGroupClause = 97,
|
||||
T_GroupingSet = 98,
|
||||
T_WindowClause = 99,
|
||||
T_RowMarkClause = 100,
|
||||
T_WithClause = 101,
|
||||
T_InferClause = 102,
|
||||
T_OnConflictClause = 103,
|
||||
T_CTESearchClause = 104,
|
||||
T_CTECycleClause = 105,
|
||||
T_CommonTableExpr = 106,
|
||||
T_MergeWhenClause = 107,
|
||||
T_MergeAction = 108,
|
||||
T_TriggerTransition = 109,
|
||||
T_JsonOutput = 110,
|
||||
T_JsonKeyValue = 111,
|
||||
T_JsonObjectConstructor = 112,
|
||||
T_JsonArrayConstructor = 113,
|
||||
T_JsonArrayQueryConstructor = 114,
|
||||
T_JsonAggConstructor = 115,
|
||||
T_JsonObjectAgg = 116,
|
||||
T_JsonArrayAgg = 117,
|
||||
T_RawStmt = 118,
|
||||
T_InsertStmt = 119,
|
||||
T_DeleteStmt = 120,
|
||||
T_UpdateStmt = 121,
|
||||
T_MergeStmt = 122,
|
||||
T_SelectStmt = 123,
|
||||
T_SetOperationStmt = 124,
|
||||
T_ReturnStmt = 125,
|
||||
T_PLAssignStmt = 126,
|
||||
T_CreateSchemaStmt = 127,
|
||||
T_AlterTableStmt = 128,
|
||||
T_ReplicaIdentityStmt = 129,
|
||||
T_AlterTableCmd = 130,
|
||||
T_AlterCollationStmt = 131,
|
||||
T_AlterDomainStmt = 132,
|
||||
T_GrantStmt = 133,
|
||||
T_ObjectWithArgs = 134,
|
||||
T_AccessPriv = 135,
|
||||
T_GrantRoleStmt = 136,
|
||||
T_AlterDefaultPrivilegesStmt = 137,
|
||||
T_CopyStmt = 138,
|
||||
T_VariableSetStmt = 139,
|
||||
T_VariableShowStmt = 140,
|
||||
T_CreateStmt = 141,
|
||||
T_Constraint = 142,
|
||||
T_CreateTableSpaceStmt = 143,
|
||||
T_DropTableSpaceStmt = 144,
|
||||
T_AlterTableSpaceOptionsStmt = 145,
|
||||
T_AlterTableMoveAllStmt = 146,
|
||||
T_CreateExtensionStmt = 147,
|
||||
T_AlterExtensionStmt = 148,
|
||||
T_AlterExtensionContentsStmt = 149,
|
||||
T_CreateFdwStmt = 150,
|
||||
T_AlterFdwStmt = 151,
|
||||
T_CreateForeignServerStmt = 152,
|
||||
T_AlterForeignServerStmt = 153,
|
||||
T_CreateForeignTableStmt = 154,
|
||||
T_CreateUserMappingStmt = 155,
|
||||
T_AlterUserMappingStmt = 156,
|
||||
T_DropUserMappingStmt = 157,
|
||||
T_ImportForeignSchemaStmt = 158,
|
||||
T_CreatePolicyStmt = 159,
|
||||
T_AlterPolicyStmt = 160,
|
||||
T_CreateAmStmt = 161,
|
||||
T_CreateTrigStmt = 162,
|
||||
T_CreateEventTrigStmt = 163,
|
||||
T_AlterEventTrigStmt = 164,
|
||||
T_CreatePLangStmt = 165,
|
||||
T_CreateRoleStmt = 166,
|
||||
T_AlterRoleStmt = 167,
|
||||
T_AlterRoleSetStmt = 168,
|
||||
T_DropRoleStmt = 169,
|
||||
T_CreateSeqStmt = 170,
|
||||
T_AlterSeqStmt = 171,
|
||||
T_DefineStmt = 172,
|
||||
T_CreateDomainStmt = 173,
|
||||
T_CreateOpClassStmt = 174,
|
||||
T_CreateOpClassItem = 175,
|
||||
T_CreateOpFamilyStmt = 176,
|
||||
T_AlterOpFamilyStmt = 177,
|
||||
T_DropStmt = 178,
|
||||
T_TruncateStmt = 179,
|
||||
T_CommentStmt = 180,
|
||||
T_SecLabelStmt = 181,
|
||||
T_DeclareCursorStmt = 182,
|
||||
T_ClosePortalStmt = 183,
|
||||
T_FetchStmt = 184,
|
||||
T_IndexStmt = 185,
|
||||
T_CreateStatsStmt = 186,
|
||||
T_StatsElem = 187,
|
||||
T_AlterStatsStmt = 188,
|
||||
T_CreateFunctionStmt = 189,
|
||||
T_FunctionParameter = 190,
|
||||
T_AlterFunctionStmt = 191,
|
||||
T_DoStmt = 192,
|
||||
T_InlineCodeBlock = 193,
|
||||
T_CallStmt = 194,
|
||||
T_CallContext = 195,
|
||||
T_RenameStmt = 196,
|
||||
T_AlterObjectDependsStmt = 197,
|
||||
T_AlterObjectSchemaStmt = 198,
|
||||
T_AlterOwnerStmt = 199,
|
||||
T_AlterOperatorStmt = 200,
|
||||
T_AlterTypeStmt = 201,
|
||||
T_RuleStmt = 202,
|
||||
T_NotifyStmt = 203,
|
||||
T_ListenStmt = 204,
|
||||
T_UnlistenStmt = 205,
|
||||
T_TransactionStmt = 206,
|
||||
T_CompositeTypeStmt = 207,
|
||||
T_CreateEnumStmt = 208,
|
||||
T_CreateRangeStmt = 209,
|
||||
T_AlterEnumStmt = 210,
|
||||
T_ViewStmt = 211,
|
||||
T_LoadStmt = 212,
|
||||
T_CreatedbStmt = 213,
|
||||
T_AlterDatabaseStmt = 214,
|
||||
T_AlterDatabaseRefreshCollStmt = 215,
|
||||
T_AlterDatabaseSetStmt = 216,
|
||||
T_DropdbStmt = 217,
|
||||
T_AlterSystemStmt = 218,
|
||||
T_ClusterStmt = 219,
|
||||
T_VacuumStmt = 220,
|
||||
T_VacuumRelation = 221,
|
||||
T_ExplainStmt = 222,
|
||||
T_CreateTableAsStmt = 223,
|
||||
T_RefreshMatViewStmt = 224,
|
||||
T_CheckPointStmt = 225,
|
||||
T_DiscardStmt = 226,
|
||||
T_LockStmt = 227,
|
||||
T_ConstraintsSetStmt = 228,
|
||||
T_ReindexStmt = 229,
|
||||
T_CreateConversionStmt = 230,
|
||||
T_CreateCastStmt = 231,
|
||||
T_CreateTransformStmt = 232,
|
||||
T_PrepareStmt = 233,
|
||||
T_ExecuteStmt = 234,
|
||||
T_DeallocateStmt = 235,
|
||||
T_DropOwnedStmt = 236,
|
||||
T_ReassignOwnedStmt = 237,
|
||||
T_AlterTSDictionaryStmt = 238,
|
||||
T_AlterTSConfigurationStmt = 239,
|
||||
T_PublicationTable = 240,
|
||||
T_PublicationObjSpec = 241,
|
||||
T_CreatePublicationStmt = 242,
|
||||
T_AlterPublicationStmt = 243,
|
||||
T_CreateSubscriptionStmt = 244,
|
||||
T_AlterSubscriptionStmt = 245,
|
||||
T_DropSubscriptionStmt = 246,
|
||||
T_PlannerGlobal = 247,
|
||||
T_PlannerInfo = 248,
|
||||
T_RelOptInfo = 249,
|
||||
T_IndexOptInfo = 250,
|
||||
T_ForeignKeyOptInfo = 251,
|
||||
T_StatisticExtInfo = 252,
|
||||
T_JoinDomain = 253,
|
||||
T_EquivalenceClass = 254,
|
||||
T_EquivalenceMember = 255,
|
||||
T_PathKey = 256,
|
||||
T_PathTarget = 257,
|
||||
T_ParamPathInfo = 258,
|
||||
T_Path = 259,
|
||||
T_IndexPath = 260,
|
||||
T_IndexClause = 261,
|
||||
T_BitmapHeapPath = 262,
|
||||
T_BitmapAndPath = 263,
|
||||
T_BitmapOrPath = 264,
|
||||
T_TidPath = 265,
|
||||
T_TidRangePath = 266,
|
||||
T_SubqueryScanPath = 267,
|
||||
T_ForeignPath = 268,
|
||||
T_CustomPath = 269,
|
||||
T_AppendPath = 270,
|
||||
T_MergeAppendPath = 271,
|
||||
T_GroupResultPath = 272,
|
||||
T_MaterialPath = 273,
|
||||
T_MemoizePath = 274,
|
||||
T_UniquePath = 275,
|
||||
T_GatherPath = 276,
|
||||
T_GatherMergePath = 277,
|
||||
T_NestPath = 278,
|
||||
T_MergePath = 279,
|
||||
T_HashPath = 280,
|
||||
T_ProjectionPath = 281,
|
||||
T_ProjectSetPath = 282,
|
||||
T_SortPath = 283,
|
||||
T_IncrementalSortPath = 284,
|
||||
T_GroupPath = 285,
|
||||
T_UpperUniquePath = 286,
|
||||
T_AggPath = 287,
|
||||
T_GroupingSetData = 288,
|
||||
T_RollupData = 289,
|
||||
T_GroupingSetsPath = 290,
|
||||
T_MinMaxAggPath = 291,
|
||||
T_WindowAggPath = 292,
|
||||
T_SetOpPath = 293,
|
||||
T_RecursiveUnionPath = 294,
|
||||
T_LockRowsPath = 295,
|
||||
T_ModifyTablePath = 296,
|
||||
T_LimitPath = 297,
|
||||
T_RestrictInfo = 298,
|
||||
T_PlaceHolderVar = 299,
|
||||
T_SpecialJoinInfo = 300,
|
||||
T_OuterJoinClauseInfo = 301,
|
||||
T_AppendRelInfo = 302,
|
||||
T_RowIdentityVarInfo = 303,
|
||||
T_PlaceHolderInfo = 304,
|
||||
T_MinMaxAggInfo = 305,
|
||||
T_PlannerParamItem = 306,
|
||||
T_AggInfo = 307,
|
||||
T_AggTransInfo = 308,
|
||||
T_PlannedStmt = 309,
|
||||
T_Result = 310,
|
||||
T_ProjectSet = 311,
|
||||
T_ModifyTable = 312,
|
||||
T_Append = 313,
|
||||
T_MergeAppend = 314,
|
||||
T_RecursiveUnion = 315,
|
||||
T_BitmapAnd = 316,
|
||||
T_BitmapOr = 317,
|
||||
T_SeqScan = 318,
|
||||
T_SampleScan = 319,
|
||||
T_IndexScan = 320,
|
||||
T_IndexOnlyScan = 321,
|
||||
T_BitmapIndexScan = 322,
|
||||
T_BitmapHeapScan = 323,
|
||||
T_TidScan = 324,
|
||||
T_TidRangeScan = 325,
|
||||
T_SubqueryScan = 326,
|
||||
T_FunctionScan = 327,
|
||||
T_ValuesScan = 328,
|
||||
T_TableFuncScan = 329,
|
||||
T_CteScan = 330,
|
||||
T_NamedTuplestoreScan = 331,
|
||||
T_WorkTableScan = 332,
|
||||
T_ForeignScan = 333,
|
||||
T_CustomScan = 334,
|
||||
T_NestLoop = 335,
|
||||
T_NestLoopParam = 336,
|
||||
T_MergeJoin = 337,
|
||||
T_HashJoin = 338,
|
||||
T_Material = 339,
|
||||
T_Memoize = 340,
|
||||
T_Sort = 341,
|
||||
T_IncrementalSort = 342,
|
||||
T_Group = 343,
|
||||
T_Agg = 344,
|
||||
T_WindowAgg = 345,
|
||||
T_Unique = 346,
|
||||
T_Gather = 347,
|
||||
T_GatherMerge = 348,
|
||||
T_Hash = 349,
|
||||
T_SetOp = 350,
|
||||
T_LockRows = 351,
|
||||
T_Limit = 352,
|
||||
T_PlanRowMark = 353,
|
||||
T_PartitionPruneInfo = 354,
|
||||
T_PartitionedRelPruneInfo = 355,
|
||||
T_PartitionPruneStepOp = 356,
|
||||
T_PartitionPruneStepCombine = 357,
|
||||
T_PlanInvalItem = 358,
|
||||
T_ExprState = 359,
|
||||
T_IndexInfo = 360,
|
||||
T_ExprContext = 361,
|
||||
T_ReturnSetInfo = 362,
|
||||
T_ProjectionInfo = 363,
|
||||
T_JunkFilter = 364,
|
||||
T_OnConflictSetState = 365,
|
||||
T_MergeActionState = 366,
|
||||
T_ResultRelInfo = 367,
|
||||
T_EState = 368,
|
||||
T_WindowFuncExprState = 369,
|
||||
T_SetExprState = 370,
|
||||
T_SubPlanState = 371,
|
||||
T_DomainConstraintState = 372,
|
||||
T_ResultState = 373,
|
||||
T_ProjectSetState = 374,
|
||||
T_ModifyTableState = 375,
|
||||
T_AppendState = 376,
|
||||
T_MergeAppendState = 377,
|
||||
T_RecursiveUnionState = 378,
|
||||
T_BitmapAndState = 379,
|
||||
T_BitmapOrState = 380,
|
||||
T_ScanState = 381,
|
||||
T_SeqScanState = 382,
|
||||
T_SampleScanState = 383,
|
||||
T_IndexScanState = 384,
|
||||
T_IndexOnlyScanState = 385,
|
||||
T_BitmapIndexScanState = 386,
|
||||
T_BitmapHeapScanState = 387,
|
||||
T_TidScanState = 388,
|
||||
T_TidRangeScanState = 389,
|
||||
T_SubqueryScanState = 390,
|
||||
T_FunctionScanState = 391,
|
||||
T_ValuesScanState = 392,
|
||||
T_TableFuncScanState = 393,
|
||||
T_CteScanState = 394,
|
||||
T_NamedTuplestoreScanState = 395,
|
||||
T_WorkTableScanState = 396,
|
||||
T_ForeignScanState = 397,
|
||||
T_CustomScanState = 398,
|
||||
T_JoinState = 399,
|
||||
T_NestLoopState = 400,
|
||||
T_MergeJoinState = 401,
|
||||
T_HashJoinState = 402,
|
||||
T_MaterialState = 403,
|
||||
T_MemoizeState = 404,
|
||||
T_SortState = 405,
|
||||
T_IncrementalSortState = 406,
|
||||
T_GroupState = 407,
|
||||
T_AggState = 408,
|
||||
T_WindowAggState = 409,
|
||||
T_UniqueState = 410,
|
||||
T_GatherState = 411,
|
||||
T_GatherMergeState = 412,
|
||||
T_HashState = 413,
|
||||
T_SetOpState = 414,
|
||||
T_LockRowsState = 415,
|
||||
T_LimitState = 416,
|
||||
T_IndexAmRoutine = 417,
|
||||
T_TableAmRoutine = 418,
|
||||
T_TsmRoutine = 419,
|
||||
T_EventTriggerData = 420,
|
||||
T_TriggerData = 421,
|
||||
T_TupleTableSlot = 422,
|
||||
T_FdwRoutine = 423,
|
||||
T_Bitmapset = 424,
|
||||
T_ExtensibleNode = 425,
|
||||
T_ErrorSaveContext = 426,
|
||||
T_IdentifySystemCmd = 427,
|
||||
T_BaseBackupCmd = 428,
|
||||
T_CreateReplicationSlotCmd = 429,
|
||||
T_DropReplicationSlotCmd = 430,
|
||||
T_StartReplicationCmd = 431,
|
||||
T_ReadReplicationSlotCmd = 432,
|
||||
T_TimeLineHistoryCmd = 433,
|
||||
T_SupportRequestSimplify = 434,
|
||||
T_SupportRequestSelectivity = 435,
|
||||
T_SupportRequestCost = 436,
|
||||
T_SupportRequestRows = 437,
|
||||
T_SupportRequestIndexCondition = 438,
|
||||
T_SupportRequestWFuncMonotonic = 439,
|
||||
T_SupportRequestOptimizeWindowClause = 440,
|
||||
T_Integer = 441,
|
||||
T_Float = 442,
|
||||
T_Boolean = 443,
|
||||
T_String = 444,
|
||||
T_BitString = 445,
|
||||
T_ForeignKeyCacheInfo = 446,
|
||||
T_IntList = 447,
|
||||
T_OidList = 448,
|
||||
T_XidList = 449,
|
||||
T_AllocSetContext = 450,
|
||||
T_GenerationContext = 451,
|
||||
T_SlabContext = 452,
|
||||
T_TIDBitmap = 453,
|
||||
T_WindowObjectData = 454,
|
803
contrib/postgres-cmake/pg_config.h
Normal file
@ -0,0 +1,803 @@
|
||||
/* src/include/pg_config.h. Generated from pg_config.h.in by configure. */
|
||||
/* src/include/pg_config.h.in. Generated from configure.in by autoheader. */
|
||||
|
||||
/* Define if building universal (internal helper macro) */
|
||||
/* #undef AC_APPLE_UNIVERSAL_BUILD */
|
||||
|
||||
/* The normal alignment of `double', in bytes. */
|
||||
#define ALIGNOF_DOUBLE 4
|
||||
|
||||
/* The normal alignment of `int', in bytes. */
|
||||
#define ALIGNOF_INT 4
|
||||
|
||||
/* The normal alignment of `long', in bytes. */
|
||||
#define ALIGNOF_LONG 4
|
||||
|
||||
/* The normal alignment of `long long int', in bytes. */
|
||||
#define ALIGNOF_LONG_LONG_INT 4
|
||||
|
||||
/* The normal alignment of `short', in bytes. */
|
||||
#define ALIGNOF_SHORT 2
|
||||
|
||||
/* Size of a disk block --- this also limits the size of a tuple. You can set
|
||||
it bigger if you need bigger tuples (although TOAST should reduce the need
|
||||
to have large tuples, since fields can be spread across multiple tuples).
|
||||
BLCKSZ must be a power of 2. The maximum possible value of BLCKSZ is
|
||||
currently 2^15 (32768). This is determined by the 15-bit widths of the
|
||||
lp_off and lp_len fields in ItemIdData (see include/storage/itemid.h).
|
||||
Changing BLCKSZ requires an initdb. */
|
||||
#define BLCKSZ 8192
|
||||
|
||||
/* Define to the default TCP port number on which the server listens and to
|
||||
which clients will try to connect. This can be overridden at run-time, but
|
||||
it's convenient if your clients have the right default compiled in.
|
||||
(--with-pgport=PORTNUM) */
|
||||
#define DEF_PGPORT 5432
|
||||
|
||||
/* Define to the default TCP port number as a string constant. */
|
||||
#define DEF_PGPORT_STR "5432"
|
||||
|
||||
/* Define to the file name extension of dynamically-loadable modules. */
|
||||
#define DLSUFFIX ".so"
|
||||
|
||||
/* Define to build with GSSAPI support. (--with-gssapi) */
|
||||
//#define ENABLE_GSS 0
|
||||
|
||||
/* Define to 1 if you want National Language Support. (--enable-nls) */
|
||||
/* #undef ENABLE_NLS */
|
||||
|
||||
/* Define to 1 to build client libraries as thread-safe code.
|
||||
(--enable-thread-safety) */
|
||||
#define ENABLE_THREAD_SAFETY 1
|
||||
|
||||
/* Define to nothing if C supports flexible array members, and to 1 if it does
|
||||
not. That way, with a declaration like `struct s { int n; double
|
||||
d[FLEXIBLE_ARRAY_MEMBER]; };', the struct hack can be used with pre-C99
|
||||
compilers. When computing the size of such an object, don't use 'sizeof
|
||||
(struct s)' as it overestimates the size. Use 'offsetof (struct s, d)'
|
||||
instead. Don't use 'offsetof (struct s, d[0])', as this doesn't work with
|
||||
MSVC and with C++ compilers. */
|
||||
#define FLEXIBLE_ARRAY_MEMBER /**/
|
||||
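As the comment above explains, `FLEXIBLE_ARRAY_MEMBER` expands to nothing on C99 compilers (and to 1 otherwise), and structs using it must be sized with offsetof rather than sizeof. A hedged, self-contained C illustration (struct and function names are made up for the example):

```c
#include <stddef.h>
#include <stdlib.h>

#define FLEXIBLE_ARRAY_MEMBER /* expands to nothing with a C99 compiler */

struct demo_varlen
{
    int    n;
    double d[FLEXIBLE_ARRAY_MEMBER];
};

struct demo_varlen *demo_alloc(int n)
{
    /* Size with offsetof(), per the comment above: sizeof() would overestimate. */
    struct demo_varlen *p = malloc(offsetof(struct demo_varlen, d) + n * sizeof(double));
    if (p)
        p->n = n;
    return p;
}
```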
|
||||
/* float4 values are passed by value if 'true', by reference if 'false' */
|
||||
#define FLOAT4PASSBYVAL true
|
||||
|
||||
/* float8, int8, and related values are passed by value if 'true', by
|
||||
reference if 'false' */
|
||||
#define FLOAT8PASSBYVAL false
|
||||
|
||||
/* Define to 1 if you have the `append_history' function. */
|
||||
/* #undef HAVE_APPEND_HISTORY */
|
||||
|
||||
/* Define to 1 if you want to use atomics if available. */
|
||||
#define HAVE_ATOMICS 1
|
||||
|
||||
/* Define to 1 if you have the <atomic.h> header file. */
|
||||
/* #undef HAVE_ATOMIC_H */
|
||||
|
||||
/* Define to 1 if you have the `cbrt' function. */
|
||||
#define HAVE_CBRT 1
|
||||
|
||||
/* Define to 1 if you have the `class' function. */
|
||||
/* #undef HAVE_CLASS */
|
||||
|
||||
/* Define to 1 if you have the <crtdefs.h> header file. */
|
||||
/* #undef HAVE_CRTDEFS_H */
|
||||
|
||||
/* Define to 1 if you have the `crypt' function. */
|
||||
#define HAVE_CRYPT 1
|
||||
|
||||
/* Define to 1 if you have the <crypt.h> header file. */
|
||||
#define HAVE_CRYPT_H 1
|
||||
|
||||
/* Define to 1 if you have the declaration of `fdatasync', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_FDATASYNC 1
|
||||
|
||||
/* Define to 1 if you have the declaration of `F_FULLFSYNC', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_F_FULLFSYNC 0
|
||||
|
||||
/* Define to 1 if you have the declaration of `posix_fadvise', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_POSIX_FADVISE 1
|
||||
|
||||
/* Define to 1 if you have the declaration of `snprintf', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_SNPRINTF 1
|
||||
|
||||
/* Define to 1 if you have the declaration of `strlcat', and to 0 if you
|
||||
don't. */
|
||||
#if OS_DARWIN
|
||||
#define HAVE_DECL_STRLCAT 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the declaration of `strlcpy', and to 0 if you
|
||||
don't. */
|
||||
#if OS_DARWIN
|
||||
#define HAVE_DECL_STRLCPY 1
|
||||
#endif
|
||||
|
||||
/* Define to 1 if you have the declaration of `sys_siglist', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_SYS_SIGLIST 1
|
||||
|
||||
/* Define to 1 if you have the declaration of `vsnprintf', and to 0 if you
|
||||
don't. */
|
||||
#define HAVE_DECL_VSNPRINTF 1
|
||||
|
||||
/* Define to 1 if you have the <dld.h> header file. */
|
||||
/* #undef HAVE_DLD_H */
|
||||
|
||||
/* Define to 1 if you have the <editline/history.h> header file. */
|
||||
/* #undef HAVE_EDITLINE_HISTORY_H */
|
||||
|
||||
/* Define to 1 if you have the <editline/readline.h> header file. */
|
||||
#define HAVE_EDITLINE_READLINE_H 1
|
||||
|
||||
/* Define to 1 if you have the `fpclass' function. */
|
||||
/* #undef HAVE_FPCLASS */
|
||||
|
||||
/* Define to 1 if you have the `fp_class' function. */
|
||||
/* #undef HAVE_FP_CLASS */
|
||||
|
||||
/* Define to 1 if you have the `fp_class_d' function. */
|
||||
/* #undef HAVE_FP_CLASS_D */
|
||||
|
||||
/* Define to 1 if you have the <fp_class.h> header file. */
|
||||
/* #undef HAVE_FP_CLASS_H */
|
||||
|
||||
/* Define to 1 if fseeko (and presumably ftello) exists and is declared. */
|
||||
#define HAVE_FSEEKO 1
|
||||
|
||||
/* Define to 1 if you have __atomic_compare_exchange_n(int *, int *, int). */
|
||||
/* #undef HAVE_GCC__ATOMIC_INT32_CAS */
|
||||
|
||||
/* Define to 1 if you have __atomic_compare_exchange_n(int64 *, int *, int64).
|
||||
*/
|
||||
/* #undef HAVE_GCC__ATOMIC_INT64_CAS */
|
||||
|
||||
/* Define to 1 if you have __sync_lock_test_and_set(char *) and friends. */
|
||||
#define HAVE_GCC__SYNC_CHAR_TAS 1
|
||||
|
||||
/* Define to 1 if you have __sync_compare_and_swap(int *, int, int). */
|
||||
/* #undef HAVE_GCC__SYNC_INT32_CAS */
|
||||
|
||||
/* Define to 1 if you have __sync_lock_test_and_set(int *) and friends. */
|
||||
#define HAVE_GCC__SYNC_INT32_TAS 1
|
||||
|
||||
/* Define to 1 if you have __sync_compare_and_swap(int64 *, int64, int64). */
|
||||
/* #undef HAVE_GCC__SYNC_INT64_CAS */
|
||||
|
||||
/* Define to 1 if you have the `getifaddrs' function. */
#define HAVE_GETIFADDRS 1

/* Define to 1 if you have the `getopt' function. */
#define HAVE_GETOPT 1

/* Define to 1 if you have the <getopt.h> header file. */
#define HAVE_GETOPT_H 1

/* Define to 1 if you have the `getopt_long' function. */
#define HAVE_GETOPT_LONG 1

/* Define to 1 if you have the `getpeereid' function. */
/* #undef HAVE_GETPEEREID */

/* Define to 1 if you have the `getpeerucred' function. */
/* #undef HAVE_GETPEERUCRED */

/* Define to 1 if you have the <gssapi_ext.h> header file. */
/* #undef HAVE_GSSAPI_EXT_H */

/* Define to 1 if you have the <gssapi/gssapi_ext.h> header file. */
/* #undef HAVE_GSSAPI_GSSAPI_EXT_H */

/* Define to 1 if you have the <gssapi/gssapi.h> header file. */
//#define HAVE_GSSAPI_GSSAPI_H 0

/* Define to 1 if you have the <gssapi.h> header file. */
/* #undef HAVE_GSSAPI_H */

/* Define to 1 if you have the <history.h> header file. */
/* #undef HAVE_HISTORY_H */

/* Define to 1 if you have the `history_truncate_file' function. */
#define HAVE_HISTORY_TRUNCATE_FILE 1

/* Define to 1 if you have the <ieeefp.h> header file. */
/* #undef HAVE_IEEEFP_H */

/* Define to 1 if you have the <ifaddrs.h> header file. */
#define HAVE_IFADDRS_H 1

/* Define to 1 if you have the `inet_aton' function. */
#define HAVE_INET_ATON 1

/* Define to 1 if you have the `inet_pton' function. */
#define HAVE_INET_PTON 1

/* Define to 1 if the system has the type `int64'. */
/* #undef HAVE_INT64 */

/* Define to 1 if the system has the type `int8'. */
/* #undef HAVE_INT8 */

/* Define to 1 if the system has the type `intptr_t'. */
#define HAVE_INTPTR_T 1

/* Define to 1 if you have the <inttypes.h> header file. */
#define HAVE_INTTYPES_H 1

/* Define to 1 if you have the global variable 'int opterr'. */
#define HAVE_INT_OPTERR 1

/* Define to 1 if you have the global variable 'int optreset'. */
/* #undef HAVE_INT_OPTRESET */

/* Define to 1 if you have the global variable 'int timezone'. */
#define HAVE_INT_TIMEZONE 1

/* Define to 1 if you have isinf(). */
#define HAVE_ISINF 1

/* Define to 1 if you have the <langinfo.h> header file. */
#define HAVE_LANGINFO_H 1

/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1

/* Define to 1 if you have the `ldap' library (-lldap). */
//#define HAVE_LIBLDAP 0

/* Define to 1 if you have the `m' library (-lm). */
#define HAVE_LIBM 1

/* Define to 1 if you have the `pam' library (-lpam). */
#define HAVE_LIBPAM 1

/* Define if you have a function readline library */
#define HAVE_LIBREADLINE 1

/* Define to 1 if you have the `selinux' library (-lselinux). */
/* #undef HAVE_LIBSELINUX */

/* Define to 1 if you have the `ssl' library (-lssl). */
#define HAVE_LIBSSL 0

/* Define to 1 if you have the `wldap32' library (-lwldap32). */
/* #undef HAVE_LIBWLDAP32 */

/* Define to 1 if you have the `xml2' library (-lxml2). */
#define HAVE_LIBXML2 1

/* Define to 1 if you have the `xslt' library (-lxslt). */
#define HAVE_LIBXSLT 1

/* Define to 1 if you have the `z' library (-lz). */
#define HAVE_LIBZ 1

/* Define to 1 if you have the `zstd' library (-lzstd). */
/* #undef HAVE_LIBZSTD */

/* Define to 1 if constants of type 'long long int' should have the suffix LL.
   */
#define HAVE_LL_CONSTANTS 1

/* Define to 1 if the system has the type `locale_t'. */
#define HAVE_LOCALE_T 1

/* Define to 1 if `long int' works and is 64 bits. */
/* #undef HAVE_LONG_INT_64 */

/* Define to 1 if the system has the type `long long int'. */
#define HAVE_LONG_LONG_INT 1

/* Define to 1 if `long long int' works and is 64 bits. */
#define HAVE_LONG_LONG_INT_64 1

/* Define to 1 if you have the <mbarrier.h> header file. */
/* #undef HAVE_MBARRIER_H */

/* Define to 1 if you have the `mbstowcs_l' function. */
/* #undef HAVE_MBSTOWCS_L */

/* Define to 1 if you have the `memmove' function. */
#define HAVE_MEMMOVE 1

/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1

/* Define to 1 if you have the `mkdtemp' function. */
#define HAVE_MKDTEMP 1

/* Define to 1 if you have the <net/if.h> header file. */
#define HAVE_NET_IF_H 1

/* Define to 1 if you have the <ossp/uuid.h> header file. */
/* #undef HAVE_OSSP_UUID_H */

/* Define to 1 if you have the <pam/pam_appl.h> header file. */
/* #undef HAVE_PAM_PAM_APPL_H */

/* Define to 1 if you have the `posix_fadvise' function. */
#define HAVE_POSIX_FADVISE 1

/* Define to 1 if you have the declaration of `preadv', and to 0 if you don't. */
/* #undef HAVE_DECL_PREADV */

/* Define to 1 if you have the declaration of `pwritev', and to 0 if you don't. */
/* #define HAVE_DECL_PWRITEV */

/* Define to 1 if you have the `X509_get_signature_info' function. */
/* #undef HAVE_X509_GET_SIGNATURE_INFO */

/* Define to 1 if you have the POSIX signal interface. */
#define HAVE_POSIX_SIGNALS 1

/* Define to 1 if the assembler supports PPC's LWARX mutex hint bit. */
/* #undef HAVE_PPC_LWARX_MUTEX_HINT */

/* Define to 1 if you have the `pthread_is_threaded_np' function. */
/* #undef HAVE_PTHREAD_IS_THREADED_NP */

/* Define to 1 if you have the <pwd.h> header file. */
#define HAVE_PWD_H 1

/* Define to 1 if you have the <readline.h> header file. */
/* #undef HAVE_READLINE_H */

/* Define to 1 if you have the <readline/history.h> header file. */
#define HAVE_READLINE_HISTORY_H 1

/* Define to 1 if you have the <readline/readline.h> header file. */
/* #undef HAVE_READLINE_READLINE_H */

/* Define to 1 if you have the `rint' function. */
#define HAVE_RINT 1

/* Define to 1 if you have the `rl_completion_matches' function. */
#define HAVE_RL_COMPLETION_MATCHES 1

/* Define to 1 if you have the `rl_filename_completion_function' function. */
#define HAVE_RL_FILENAME_COMPLETION_FUNCTION 1

/* Define to 1 if you have the `rl_reset_screen_size' function. */
/* #undef HAVE_RL_RESET_SCREEN_SIZE */

/* Define to 1 if you have the `rl_variable_bind' function. */
#define HAVE_RL_VARIABLE_BIND 1

/* Define to 1 if you have the <security/pam_appl.h> header file. */
#define HAVE_SECURITY_PAM_APPL_H 1

/* Define to 1 if you have the `setproctitle' function. */
/* #undef HAVE_SETPROCTITLE */

/* Define to 1 if the system has the type `socklen_t'. */
#define HAVE_SOCKLEN_T 1

/* Define to 1 if you have the `sigprocmask' function. */
#define HAVE_SIGPROCMASK 1

/* Define to 1 if you have sigsetjmp(). */
#define HAVE_SIGSETJMP 1

/* Define to 1 if the system has the type `sig_atomic_t'. */
#define HAVE_SIG_ATOMIC_T 1

/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1

/* Define to 1 if you have spinlocks. */
#define HAVE_SPINLOCKS 1

/* Define to 1 if you have the `SSL_CTX_set_cert_cb' function. */
#define HAVE_SSL_CTX_SET_CERT_CB 1

/* Define to 1 if you have the `SSL_CTX_set_num_tickets' function. */
/* #define HAVE_SSL_CTX_SET_NUM_TICKETS */

/* Define to 1 if you have the `SSL_get_current_compression' function. */
#define HAVE_SSL_GET_CURRENT_COMPRESSION 0

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1

/* Define to 1 if you have the `strerror' function. */
#define HAVE_STRERROR 1

/* Define to 1 if you have the `strerror_r' function. */
#define HAVE_STRERROR_R 1

/* Define to 1 if you have the <strings.h> header file. */
//#define HAVE_STRINGS_H 1

/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H 1

/* Define to 1 if you have the `strlcat' function. */
/* #undef HAVE_STRLCAT */

/* Define to 1 if you have the `strlcpy' function. */
/* #undef HAVE_STRLCPY */

#if (!OS_DARWIN)
#define HAVE_STRCHRNUL 1
#endif

/* Define to 1 if the system has the type `struct option'. */
#define HAVE_STRUCT_OPTION 1

/* Define to 1 if `sa_len' is a member of `struct sockaddr'. */
/* #undef HAVE_STRUCT_SOCKADDR_SA_LEN */

/* Define to 1 if `tm_zone' is a member of `struct tm'. */
#define HAVE_STRUCT_TM_TM_ZONE 1

/* Define to 1 if you have the `sync_file_range' function. */
/* #undef HAVE_SYNC_FILE_RANGE */

/* Define to 1 if you have the syslog interface. */
#define HAVE_SYSLOG 1

/* Define to 1 if you have the <sys/ioctl.h> header file. */
#define HAVE_SYS_IOCTL_H 1

/* Define to 1 if you have the <sys/personality.h> header file. */
/* #undef HAVE_SYS_PERSONALITY_H */

/* Define to 1 if you have the <sys/poll.h> header file. */
#define HAVE_SYS_POLL_H 1

/* Define to 1 if you have the <sys/signalfd.h> header file. */
/* #undef HAVE_SYS_SIGNALFD_H */

/* Define to 1 if you have the <sys/socket.h> header file. */
#define HAVE_SYS_SOCKET_H 1

/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H 1

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1

/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H 1

/* Define to 1 if you have the <sys/ucred.h> header file. */
#if (OS_DARWIN || OS_FREEBSD)
#define HAVE_SYS_UCRED_H 1
#endif

/* Define to 1 if you have the <sys/un.h> header file. */
#define _GNU_SOURCE 1 /* Needed for glibc struct ucred */

/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1

/* Define to 1 if your `struct tm' has `tm_zone'. Deprecated, use
   `HAVE_STRUCT_TM_TM_ZONE' instead. */
#define HAVE_TM_ZONE 1

/* Define to 1 if you have the `towlower' function. */
#define HAVE_TOWLOWER 1

/* Define to 1 if you have the external array `tzname'. */
#define HAVE_TZNAME 1

/* Define to 1 if you have the <ucred.h> header file. */
/* #undef HAVE_UCRED_H */

/* Define to 1 if the system has the type `uint64'. */
/* #undef HAVE_UINT64 */

/* Define to 1 if the system has the type `uint8'. */
/* #undef HAVE_UINT8 */

/* Define to 1 if the system has the type `uintptr_t'. */
#define HAVE_UINTPTR_T 1

/* Define to 1 if the system has the type `union semun'. */
/* #undef HAVE_UNION_SEMUN */

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have unix sockets. */
#define HAVE_UNIX_SOCKETS 1

/* Define to 1 if the system has the type `unsigned long long int'. */
#define HAVE_UNSIGNED_LONG_LONG_INT 1

/* Define to 1 if you have the `utime' function. */
#define HAVE_UTIME 1

/* Define to 1 if you have the `utimes' function. */
#define HAVE_UTIMES 1

/* Define to 1 if you have the <utime.h> header file. */
#define HAVE_UTIME_H 1

/* Define to 1 if you have BSD UUID support. */
/* #undef HAVE_UUID_BSD */

/* Define to 1 if you have E2FS UUID support. */
/* #undef HAVE_UUID_E2FS */

/* Define to 1 if you have the <uuid.h> header file. */
#define HAVE_UUID_H 1

/* Define to 1 if you have OSSP UUID support. */
#define HAVE_UUID_OSSP 1

/* Define to 1 if you have the <uuid/uuid.h> header file. */
/* #undef HAVE_UUID_UUID_H */

/* Define to 1 if your compiler knows the visibility("hidden") attribute. */
/* #undef HAVE_VISIBILITY_ATTRIBUTE */

/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1

/* Define to 1 if you have the <wchar.h> header file. */
#define HAVE_WCHAR_H 1

/* Define to 1 if you have the `wcstombs' function. */
#define HAVE_WCSTOMBS 1

/* Define to 1 if you have the `wcstombs_l' function. */
/* #undef HAVE_WCSTOMBS_L */

/* Define to 1 if your compiler understands __builtin_bswap32. */
/* #undef HAVE__BUILTIN_BSWAP32 */

/* Define to 1 if your compiler understands __builtin_constant_p. */
#define HAVE__BUILTIN_CONSTANT_P 1

/* Define to 1 if your compiler understands __builtin_frame_address. */
/* #undef HAVE__BUILTIN_FRAME_ADDRESS */

/* Define to 1 if your compiler understands __builtin_types_compatible_p. */
#define HAVE__BUILTIN_TYPES_COMPATIBLE_P 1

/* Define to 1 if your compiler understands __builtin_unreachable. */
/* #undef HAVE__BUILTIN_UNREACHABLE */

/* Define to 1 if you have __cpuid. */
/* #undef HAVE__CPUID */

/* Define to 1 if you have __get_cpuid. */
/* #undef HAVE__GET_CPUID */

/* Define to 1 if your compiler understands _Static_assert. */
/* #undef HAVE__STATIC_ASSERT */

/* Define to 1 if your compiler understands __VA_ARGS__ in macros. */
#define HAVE__VA_ARGS 1

/* Define to the appropriate snprintf length modifier for 64-bit ints. */
#define INT64_MODIFIER "ll"

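/* Illustrative sketch (not part of the original header): INT64_MODIFIER is
   pasted into printf-style format strings so that 64-bit values are formatted
   with the right length modifier; with the definition above,
   "%" INT64_MODIFIER "d" expands to "%lld". The helper name and buffer size
   below are hypothetical. */
#if 0 /* example only */
#include <stdio.h>

static void example_print_int64(long long int value)
{
    char buf[32];
    snprintf(buf, sizeof(buf), "%" INT64_MODIFIER "d", value);
    puts(buf);                       /* e.g. value 90504 prints as "90504" */
}
#endif
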
/* Define to 1 if `locale_t' requires <xlocale.h>. */
/* #undef LOCALE_T_IN_XLOCALE */

/* Define as the maximum alignment requirement of any C data type. */
#define MAXIMUM_ALIGNOF 4

/* Define bytes to use libc memset(). */
#define MEMSET_LOOP_LIMIT 1024

/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "pgsql-bugs@postgresql.org"

/* Define to the full name of this package. */
#define PACKAGE_NAME "PostgreSQL"

/* Define to the full name and version of this package. */
#define PACKAGE_STRING "PostgreSQL 9.5.4"

/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "postgresql"

/* Define to the home page for this package. */
#define PACKAGE_URL ""

/* Define to the version of this package. */
#define PACKAGE_VERSION "9.5.4"

/* Define to the name of a signed 128-bit integer type. */
/* #undef PG_INT128_TYPE */

/* Define to the name of a signed 64-bit integer type. */
#define PG_INT64_TYPE long long int

/* Define to the name of the default PostgreSQL service principal in Kerberos
   (GSSAPI). (--with-krb-srvnam=NAME) */
#define PG_KRB_SRVNAM "postgres"

/* PostgreSQL major version as a string */
#define PG_MAJORVERSION "9.5"

/* Define to gnu_printf if compiler supports it, else printf. */
#define PG_PRINTF_ATTRIBUTE printf

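/* Illustrative sketch (not part of the original header): PG_PRINTF_ATTRIBUTE
   is intended to be spliced into GCC-style format-checking attributes. The
   function name example_log is hypothetical. */
#if 0 /* example only */
extern void example_log(const char *fmt, ...)
    __attribute__((format(PG_PRINTF_ATTRIBUTE, 1, 2)));  /* check varargs against fmt */
#endif
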
/* Define to 1 if "static inline" works without unwanted warnings from
|
||||
compilations where static inline functions are defined but not called. */
|
||||
#define PG_USE_INLINE 1
|
||||
|
||||
/* PostgreSQL version as a string */
|
||||
#define PG_VERSION "9.5.4"
|
||||
|
||||
/* PostgreSQL version as a number */
|
||||
#define PG_VERSION_NUM 90504
|
||||
|
||||
/* A string containing the version number, platform, and C compiler */
|
||||
#define PG_VERSION_STR "PostgreSQL 9.5.4 on i686-pc-linux-gnu, compiled by gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-55), 32-bit"
|
||||
|
||||
/* Define to 1 to allow profiling output to be saved separately for each
|
||||
process. */
|
||||
/* #undef PROFILE_PID_DIR */
|
||||
|
||||
/* RELSEG_SIZE is the maximum number of blocks allowed in one disk file. Thus,
|
||||
the maximum size of a single file is RELSEG_SIZE * BLCKSZ; relations bigger
|
||||
than that are divided into multiple files. RELSEG_SIZE * BLCKSZ must be
|
||||
less than your OS' limit on file size. This is often 2 GB or 4GB in a
|
||||
32-bit operating system, unless you have large file support enabled. By
|
||||
default, we make the limit 1 GB to avoid any possible integer-overflow
|
||||
problems within the OS. A limit smaller than necessary only means we divide
|
||||
a large relation into more chunks than necessary, so it seems best to err
|
||||
in the direction of a small limit. A power-of-2 value is recommended to
|
||||
save a few cycles in md.c, but is not absolutely required. Changing
|
||||
RELSEG_SIZE requires an initdb. */
|
||||
#define RELSEG_SIZE 131072
|
||||
|
||||
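/* Worked example (not part of the original header): assuming the usual
   8192-byte BLCKSZ defined earlier in this header, the per-segment cap is
   RELSEG_SIZE * BLCKSZ = 131072 * 8192 = 1073741824 bytes = 1 GiB, which is
   the 1 GB default described in the comment above. A compile-time sanity
   check could look like this (the typedef name is hypothetical): */
#if 0 /* example only */
typedef char relseg_cap_is_one_gib[(131072LL * 8192LL == 1073741824LL) ? 1 : -1];
#endif
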
/* The size of `long', as computed by sizeof. */
#define SIZEOF_LONG 4

/* The size of `off_t', as computed by sizeof. */
#define SIZEOF_OFF_T 8

/* The size of `size_t', as computed by sizeof. */
#define SIZEOF_SIZE_T 4

/* The size of `void *', as computed by sizeof. */
#define SIZEOF_VOID_P 4

/* Define to 1 if you have the ANSI C header files. */
#define STDC_HEADERS 1

/* Define to 1 if strerror_r() returns a int. */
/* #undef STRERROR_R_INT */

/* Define to 1 if your <sys/time.h> declares `struct tm'. */
/* #undef TM_IN_SYS_TIME */

/* Define to 1 to build with assertion checks. (--enable-cassert) */
/* #undef USE_ASSERT_CHECKING */

/* Define to 1 to build with Bonjour support. (--with-bonjour) */
/* #undef USE_BONJOUR */

/* Define to 1 if you want float4 values to be passed by value.
   (--enable-float4-byval) */
#define USE_FLOAT4_BYVAL 1

/* Define to 1 if you want float8, int8, etc values to be passed by value.
   (--enable-float8-byval) */
/* #undef USE_FLOAT8_BYVAL */

/* Define to 1 if you want 64-bit integer timestamp and interval support.
   (--enable-integer-datetimes) */
#define USE_INTEGER_DATETIMES 1

/* Define to 1 to build with LDAP support. (--with-ldap) */
//#define USE_LDAP 0

/* Define to 1 to build with XML support. (--with-libxml) */
#define USE_LIBXML 1

/* Define to 1 to use XSLT support when building contrib/xml2.
   (--with-libxslt) */
#define USE_LIBXSLT 1

/* Define to select named POSIX semaphores. */
/* #undef USE_NAMED_POSIX_SEMAPHORES */

/* Define to build with OpenSSL support. (--with-openssl) */
#define USE_OPENSSL 0

#define USE_OPENSSL_RANDOM 0

#define FRONTEND 1

/* Define to 1 to build with PAM support. (--with-pam) */
#define USE_PAM 1

/* Use replacement snprintf() functions. */
/* #undef USE_REPL_SNPRINTF */

/* Define to 1 to use Intel SSE 4.2 CRC instructions with a runtime check. */
#define USE_SLICING_BY_8_CRC32C 1

/* Define to 1 use Intel SSE 4.2 CRC instructions. */
/* #undef USE_SSE42_CRC32C */

/* Define to 1 to use Intel SSSE 4.2 CRC instructions with a runtime check. */
/* #undef USE_SSE42_CRC32C_WITH_RUNTIME_CHECK */

/* Define to select SysV-style semaphores. */
#define USE_SYSV_SEMAPHORES 1

/* Define to select SysV-style shared memory. */
#define USE_SYSV_SHARED_MEMORY 1

/* Define to select unnamed POSIX semaphores. */
/* #undef USE_UNNAMED_POSIX_SEMAPHORES */

/* Define to select Win32-style semaphores. */
/* #undef USE_WIN32_SEMAPHORES */

/* Define to select Win32-style shared memory. */
/* #undef USE_WIN32_SHARED_MEMORY */

/* Define to 1 to build with ZSTD support. (--with-zstd) */
/* #undef USE_ZSTD */

/* Define to 1 if `wcstombs_l' requires <xlocale.h>. */
/* #undef WCSTOMBS_L_IN_XLOCALE */

/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
#if defined AC_APPLE_UNIVERSAL_BUILD
# if defined __BIG_ENDIAN__
#  define WORDS_BIGENDIAN 1
# endif
#else
# ifndef WORDS_BIGENDIAN
/* #  undef WORDS_BIGENDIAN */
# endif
#endif

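/* Illustrative sketch (not part of the original header): WORDS_BIGENDIAN
   describes the same property this small runtime probe detects; a big-endian
   host stores the most significant byte of an integer first. The function
   name is hypothetical. */
#if 0 /* example only */
static int example_host_is_big_endian(void)
{
    const unsigned int probe = 1u;
    /* On a big-endian host the first byte of `probe' is 0; on little-endian it is 1. */
    return *(const unsigned char *) &probe == 0;
}
#endif
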
/* Size of a WAL file block. This need have no particular relation to BLCKSZ.
   XLOG_BLCKSZ must be a power of 2, and if your system supports O_DIRECT I/O,
   XLOG_BLCKSZ must be a multiple of the alignment requirement for direct-I/O
   buffers, else direct I/O may fail. Changing XLOG_BLCKSZ requires an initdb.
   */
#define XLOG_BLCKSZ 8192

/* XLOG_SEG_SIZE is the size of a single WAL file. This must be a power of 2
   and larger than XLOG_BLCKSZ (preferably, a great deal larger than
   XLOG_BLCKSZ). Changing XLOG_SEG_SIZE requires an initdb. */
#define XLOG_SEG_SIZE (16 * 1024 * 1024)


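/* Worked example (not part of the original header): with the values above,
   XLOG_BLCKSZ = 8192 bytes (2^13) and XLOG_SEG_SIZE = 16 * 1024 * 1024 =
   16777216 bytes (2^24). Both are powers of 2, the segment is larger than the
   block size as required, and one WAL segment therefore holds
   XLOG_SEG_SIZE / XLOG_BLCKSZ = 2048 blocks. */
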
/* Number of bits in a file offset, on hosts where this is settable. */
#define _FILE_OFFSET_BITS 64

/* Define to 1 to make fseeko visible on some hosts (e.g. glibc 2.2). */
/* #undef _LARGEFILE_SOURCE */

/* Define for large files, on AIX-style hosts. */
/* #undef _LARGE_FILES */

/* Define to `__inline__' or `__inline' if that's what the C compiler
   calls it, or to nothing if 'inline' is not supported under any name. */
#ifndef __cplusplus
/* #undef inline */
#endif

/* Define to the type of a signed integer type wide enough to hold a pointer,
   if such a type exists, and if the system does not define it. */
/* #undef intptr_t */

/* Define to empty if the C compiler does not understand signed types. */
/* #undef signed */

/* Define to the type of an unsigned integer type wide enough to hold a
   pointer, if such a type exists, and if the system does not define it. */
/* #undef uintptr_t */
7
contrib/postgres-cmake/pg_config_ext.h
Normal file
7
contrib/postgres-cmake/pg_config_ext.h
Normal file
@ -0,0 +1,7 @@
/*
 * * src/include/pg_config_ext.h.in. This is generated manually, not by
 * * autoheader, since we want to limit which symbols get defined here.
 * */

/* Define to the name of a signed 64-bit integer type. */
#define PG_INT64_TYPE long long int
34
contrib/postgres-cmake/pg_config_os.h
Normal file
34
contrib/postgres-cmake/pg_config_os.h
Normal file
@ -0,0 +1,34 @@
#if defined(OS_DARWIN)

/* src/include/port/darwin.h */
#define __darwin__ 1

#if HAVE_DECL_F_FULLFSYNC /* not present before macOS 10.3 */
#define HAVE_FSYNC_WRITETHROUGH
#endif

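/* Illustrative sketch (not part of the original header): when
   HAVE_FSYNC_WRITETHROUGH is available, a write-through flush on macOS is
   requested with fcntl(F_FULLFSYNC) rather than plain fsync(). The helper
   name is hypothetical and `fd' is assumed to be an open file descriptor. */
#if 0 /* example only */
#include <fcntl.h>

static int example_fsync_writethrough(int fd)
{
    /* F_FULLFSYNC asks the OS to push data all the way to stable storage. */
    return (fcntl(fd, F_FULLFSYNC, 0) < 0) ? -1 : 0;
}
#endif
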
#else
/* src/include/port/linux.h */
/*
 * As of July 2007, all known versions of the Linux kernel will sometimes
 * return EIDRM for a shmctl() operation when EINVAL is correct (it happens
 * when the low-order 15 bits of the supplied shm ID match the slot number
 * assigned to a newer shmem segment). We deal with this by assuming that
 * EIDRM means EINVAL in PGSharedMemoryIsInUse(). This is reasonably safe
 * since in fact Linux has no excuse for ever returning EIDRM; it doesn't
 * track removed segments in a way that would allow distinguishing them from
 * private ones. But someday that code might get upgraded, and we'd have
 * to have a kernel version test here.
 */
#define HAVE_LINUX_EIDRM_BUG

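/* Illustrative sketch (not part of the original header, and not the actual
   PGSharedMemoryIsInUse() code): the workaround described above amounts to
   treating EIDRM from shmctl() the same as EINVAL, i.e. "segment is gone".
   The helper name is hypothetical. */
#if 0 /* example only */
#include <errno.h>
#include <sys/ipc.h>
#include <sys/shm.h>

static int example_segment_is_gone(int shm_id)
{
    struct shmid_ds buf;

    if (shmctl(shm_id, IPC_STAT, &buf) < 0)
        return (errno == EINVAL) || (errno == EIDRM);  /* buggy kernels report EIDRM */
    return 0;
}
#endif
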
/*
 * Set the default wal_sync_method to fdatasync. With recent Linux versions,
 * xlogdefs.h's normal rules will prefer open_datasync, which (a) doesn't
 * perform better and (b) causes outright failures on ext4 data=journal
 * filesystems, because those don't support O_DIRECT.
 */
#define PLATFORM_DEFAULT_SYNC_METHOD SYNC_METHOD_FDATASYNC

#endif
12
contrib/postgres-cmake/pg_config_paths.h
Normal file
12
contrib/postgres-cmake/pg_config_paths.h
Normal file
@ -0,0 +1,12 @@
#define PGBINDIR "/bin"
#define PGSHAREDIR "/share"
#define SYSCONFDIR "/etc"
#define INCLUDEDIR "/include"
#define PKGINCLUDEDIR "/include"
#define INCLUDEDIRSERVER "/include/server"
#define LIBDIR "/lib"
#define PKGLIBDIR "/lib"
#define LOCALEDIR "/share/locale"
#define DOCDIR "/doc"
#define HTMLDIR "/doc"
#define MANDIR "/man"
Some files were not shown because too many files have changed in this diff.