Mirror of https://github.com/ClickHouse/ClickHouse.git
Synced 2024-11-21 23:21:59 +00:00

Merge branch 'master' into remove-useless-install
This commit is contained in:
commit 241cc2abf4
.clang-format
@@ -21,7 +21,6 @@ ConstructorInitializerAllOnOneLineOrOnePerLine: true
ExperimentalAutoDetectBinPacking: true
UseTab: Never
TabWidth: 4
-IndentWidth: 4
Standard: Cpp11
PointerAlignment: Middle
MaxEmptyLinesToKeep: 2
@@ -75,11 +74,12 @@ ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
DerivePointerAlignment: false
DisableFormat: false
+IndentRequiresClause: false
IndentWidth: 4
IndentWrappedFunctionNames: false
MacroBlockBegin: ''
MacroBlockEnd: ''
-NamespaceIndentation: Inner
+NamespaceIndentation: None
ObjCBlockIndentWidth: 4
ObjCSpaceAfterProperty: true
ObjCSpaceBeforeProtocolList: true
@@ -89,6 +89,7 @@ PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
+RemoveBracesLLVM: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
.clang-tidy (352 changed lines)
@@ -1,217 +1,163 @@
-Checks: '-*,
-    misc-misplaced-const,
-    misc-redundant-expression,
-    misc-static-assert,
-    misc-throw-by-value-catch-by-reference,
-    misc-unconventional-assign-operator,
-    misc-uniqueptr-reset-release,
-    misc-unused-alias-decls,
-    misc-unused-parameters,
-    misc-unused-using-decls,
-
-    modernize-avoid-bind,
-    modernize-loop-convert,
-    modernize-make-shared,
-    modernize-make-unique,
-    modernize-raw-string-literal,
-    modernize-redundant-void-arg,
-    modernize-replace-random-shuffle,
-    modernize-use-bool-literals,
-    modernize-use-equals-default,
-    modernize-use-equals-delete,
-    modernize-use-nullptr,
-    modernize-use-using,
-
-    performance-faster-string-find,
-    performance-for-range-copy,
-    performance-implicit-conversion-in-loop,
-    performance-inefficient-algorithm,
-    performance-inefficient-vector-operation,
-    performance-move-const-arg,
-    performance-move-constructor-init,
-    performance-no-automatic-move,
-    performance-noexcept-move-constructor,
-    performance-trivially-destructible,
-    performance-unnecessary-copy-initialization,
-
-    readability-avoid-const-params-in-decls,
-    readability-const-return-type,
-    readability-container-contains,
-    readability-container-size-empty,
-    readability-convert-member-functions-to-static,
-    readability-delete-null-pointer,
-    readability-deleted-default,
-    readability-identifier-naming,
-    readability-inconsistent-declaration-parameter-name,
-    readability-make-member-function-const,
-    readability-misplaced-array-index,
-    readability-non-const-parameter,
-    readability-qualified-auto,
-    readability-redundant-access-specifiers,
-    readability-redundant-control-flow,
-    readability-redundant-function-ptr-dereference,
-    readability-redundant-member-init,
-    readability-redundant-smartptr-get,
-    readability-redundant-string-cstr,
-    readability-redundant-string-init,
-    readability-simplify-boolean-expr,
-    readability-simplify-subscript-expr,
-    readability-static-definition-in-anonymous-namespace,
-    readability-string-compare,
-    readability-uniqueptr-delete-release,
-
-    bugprone-argument-comment,
-    bugprone-bad-signal-to-kill-thread,
-    bugprone-bool-pointer-implicit-conversion,
-    bugprone-copy-constructor-init,
-    bugprone-dangling-handle,
-    bugprone-fold-init-type,
-    bugprone-forward-declaration-namespace,
-    bugprone-inaccurate-erase,
-    bugprone-incorrect-roundings,
-    bugprone-infinite-loop,
-    bugprone-integer-division,
-    bugprone-macro-parentheses,
-    bugprone-macro-repeated-side-effects,
-    bugprone-misplaced-operator-in-strlen-in-alloc,
-    bugprone-misplaced-pointer-artithmetic-in-alloc,
-    bugprone-misplaced-widening-cast,
-    bugprone-move-forwarding-reference,
-    bugprone-multiple-statement-macro,
-    bugprone-parent-virtual-call,
-    bugprone-posix-return,
-    bugprone-reserved-identifier,
-    bugprone-signed-char-misuse,
-    bugprone-sizeof-container,
-    bugprone-sizeof-expression,
-    bugprone-string-constructor,
-    bugprone-string-integer-assignment,
-    bugprone-string-literal-with-embedded-nul,
-    bugprone-suspicious-enum-usage,
-    bugprone-suspicious-include,
-    bugprone-suspicious-memset-usage,
-    bugprone-suspicious-missing-comma,
-    bugprone-suspicious-string-compare,
-    bugprone-swapped-arguments,
-    bugprone-terminating-continue,
-    bugprone-throw-keyword-missing,
-    bugprone-too-small-loop-variable,
-    bugprone-undefined-memory-manipulation,
-    bugprone-undelegated-constructor,
-    bugprone-unhandled-self-assignment,
-    bugprone-unused-raii,
-    bugprone-unused-return-value,
-    bugprone-use-after-move,
-    bugprone-virtual-near-miss,
-
-    cert-dcl21-cpp,
-    cert-dcl50-cpp,
-    cert-env33-c,
-    cert-err34-c,
-    cert-err52-cpp,
-    cert-flp30-c,
-    cert-mem57-cpp,
-    cert-msc50-cpp,
-    cert-oop58-cpp,
-
-    google-build-explicit-make-pair,
-    google-build-namespaces,
-    google-default-arguments,
-    google-explicit-constructor,
-    google-readability-avoid-underscore-in-googletest-name,
-    google-readability-casting,
-    google-runtime-int,
-    google-runtime-operator,
-
-    hicpp-exception-baseclass,
-
-    clang-analyzer-core.CallAndMessage,
-    clang-analyzer-core.DivideZero,
-    clang-analyzer-core.NonNullParamChecker,
-    clang-analyzer-core.NullDereference,
-    clang-analyzer-core.StackAddressEscape,
-    clang-analyzer-core.UndefinedBinaryOperatorResult,
-    clang-analyzer-core.VLASize,
-    clang-analyzer-core.uninitialized.ArraySubscript,
-    clang-analyzer-core.uninitialized.Assign,
-    clang-analyzer-core.uninitialized.Branch,
-    clang-analyzer-core.uninitialized.CapturedBlockVariable,
-    clang-analyzer-core.uninitialized.UndefReturn,
-    clang-analyzer-cplusplus.InnerPointer,
-    clang-analyzer-cplusplus.Move,
-    clang-analyzer-cplusplus.NewDelete,
-    clang-analyzer-cplusplus.NewDeleteLeaks,
-    clang-analyzer-cplusplus.PlacementNewChecker,
-    clang-analyzer-cplusplus.SelfAssignment,
-    clang-analyzer-deadcode.DeadStores,
-    clang-analyzer-optin.cplusplus.UninitializedObject,
-    clang-analyzer-optin.cplusplus.VirtualCall,
-    clang-analyzer-security.insecureAPI.UncheckedReturn,
-    clang-analyzer-security.insecureAPI.bcmp,
-    clang-analyzer-security.insecureAPI.bcopy,
-    clang-analyzer-security.insecureAPI.bzero,
-    clang-analyzer-security.insecureAPI.getpw,
-    clang-analyzer-security.insecureAPI.gets,
-    clang-analyzer-security.insecureAPI.mkstemp,
-    clang-analyzer-security.insecureAPI.mktemp,
-    clang-analyzer-security.insecureAPI.rand,
-    clang-analyzer-security.insecureAPI.strcpy,
-    clang-analyzer-unix.Malloc,
-    clang-analyzer-unix.MallocSizeof,
-    clang-analyzer-unix.MismatchedDeallocator,
-    clang-analyzer-unix.Vfork,
-    clang-analyzer-unix.cstring.BadSizeArg,
-    clang-analyzer-unix.cstring.NullArg,
-
-    boost-use-to-string,
-
-    alpha.security.cert.env.InvalidPtr,
+# To run clang-tidy from CMake, build ClickHouse with -DENABLE_CLANG_TIDY=1. To show all warnings, it is
+# recommended to pass "-k0" to Ninja.
+
+# Enable all checks + disable selected checks. Feel free to remove disabled checks from below list if
+# a) the new check is not controversial (this includes many checks in readability-* and google-*) or
+# b) too noisy (checks with > 100 new warnings are considered noisy, this includes e.g. cppcoreguidelines-*).
+
+# TODO: Once clang(-tidy) 17 is the minimum, we can convert this list to YAML
+# See https://releases.llvm.org/17.0.1/tools/clang/tools/extra/docs/ReleaseNotes.html#improvements-to-clang-tidy
+
+# TODO Let clang-tidy check headers in further directories
+# --> HeaderFilterRegex: '^.*/(src|base|programs|utils)/.*(h|hpp)$'
+HeaderFilterRegex: '^.*/(base)/.*(h|hpp)$'
+
+Checks: '*,
+    -abseil-*,
+
+    -altera-*,
+
+    -android-*,
+
+    -bugprone-assignment-in-if-condition,
+    -bugprone-branch-clone,
+    -bugprone-easily-swappable-parameters,
+    -bugprone-exception-escape,
+    -bugprone-implicit-widening-of-multiplication-result,
+    -bugprone-narrowing-conversions,
+    -bugprone-not-null-terminated-result,
+    -bugprone-reserved-identifier, # useful but too slow, TODO retry when https://reviews.llvm.org/rG1c282052624f9d0bd273bde0b47b30c96699c6c7 is merged
+    -bugprone-unchecked-optional-access,
+
+    -cert-dcl16-c,
+    -cert-dcl37-c,
+    -cert-dcl51-cpp,
+    -cert-err58-cpp,
+    -cert-msc32-c,
+    -cert-msc51-cpp,
+    -cert-oop54-cpp,
+    -cert-oop57-cpp,
+
+    -clang-analyzer-unix.Malloc,
+
+    -cppcoreguidelines-*, # impractical in a codebase as large as ClickHouse, also slow
+
+    -darwin-*,
+
+    -fuchsia-*,
+
+    -google-build-using-namespace,
+    -google-readability-braces-around-statements,
+    -google-readability-casting,
+    -google-readability-function-size,
+    -google-readability-namespace-comments,
+    -google-readability-todo,
+
+    -hicpp-avoid-c-arrays,
+    -hicpp-avoid-goto,
+    -hicpp-braces-around-statements,
+    -hicpp-explicit-conversions,
+    -hicpp-function-size,
+    -hicpp-member-init,
+    -hicpp-move-const-arg,
+    -hicpp-multiway-paths-covered,
+    -hicpp-named-parameter,
+    -hicpp-no-array-decay,
+    -hicpp-no-assembler,
+    -hicpp-no-malloc,
+    -hicpp-signed-bitwise,
+    -hicpp-special-member-functions,
+    -hicpp-uppercase-literal-suffix,
+    -hicpp-use-auto,
+    -hicpp-use-emplace,
+    -hicpp-vararg,
+
+    -linuxkernel-*,
+
+    -llvm-*,
+
+    -llvmlibc-*,
+
+    -openmp-*,
+
+    -misc-const-correctness,
+    -misc-include-cleaner, # useful but far too many occurrences
+    -misc-no-recursion,
+    -misc-non-private-member-variables-in-classes,
+    -misc-confusable-identifiers, # useful but slooow
+    -misc-use-anonymous-namespace,
+
+    -modernize-avoid-c-arrays,
+    -modernize-concat-nested-namespaces,
+    -modernize-macro-to-enum,
+    -modernize-pass-by-value,
+    -modernize-return-braced-init-list,
+    -modernize-use-auto,
+    -modernize-use-default-member-init,
+    -modernize-use-emplace,
+    -modernize-use-nodiscard,
+    -modernize-use-override,
+    -modernize-use-trailing-return-type,
+
+    -performance-inefficient-string-concatenation,
+    -performance-no-int-to-ptr,
+    -performance-avoid-endl,
+    -performance-unnecessary-value-param,
+
+    -portability-simd-intrinsics,
+
+    -readability-avoid-unconditional-preprocessor-if,
+    -readability-braces-around-statements,
+    -readability-convert-member-functions-to-static,
+    -readability-else-after-return,
+    -readability-function-cognitive-complexity,
+    -readability-function-size,
+    -readability-identifier-length,
+    -readability-identifier-naming, # useful but too slow
+    -readability-implicit-bool-conversion,
+    -readability-isolate-declaration,
+    -readability-magic-numbers,
+    -readability-named-parameter,
+    -readability-redundant-declaration,
+    -readability-simplify-boolean-expr,
+    -readability-static-accessed-through-instance,
+    -readability-suspicious-call-argument,
+    -readability-uppercase-literal-suffix,
+    -readability-use-anyofallof,
+
+    -zircon-*,
'

WarningsAsErrors: '*'

+ExtraArgs:
+    # clang-tidy 17 started to complain (for unknown reasons) that various pragmas are unknown ("clang-diagnostic-unknown-pragmas").
+    # This is technically a compiler error, not a clang-tidy error. We could litter the code base with more pragmas that suppress
+    # this error but it is better to pass the following flag to the compiler:
+    - '-Wno-unknown-pragmas'
+    - '-Wno-unused-command-line-argument' # similar issue

CheckOptions:
-    - key: readability-identifier-naming.ClassCase
-      value: CamelCase
-    - key: readability-identifier-naming.EnumCase
-      value: CamelCase
-    - key: readability-identifier-naming.LocalVariableCase
-      value: lower_case
-    - key: readability-identifier-naming.StaticConstantCase
-      value: aNy_CasE
-    - key: readability-identifier-naming.MemberCase
-      value: lower_case
-    - key: readability-identifier-naming.PrivateMemberPrefix
-      value: ''
-    - key: readability-identifier-naming.ProtectedMemberPrefix
-      value: ''
-    - key: readability-identifier-naming.PublicMemberCase
-      value: lower_case
-    - key: readability-identifier-naming.MethodCase
-      value: camelBack
-    - key: readability-identifier-naming.PrivateMethodPrefix
-      value: ''
-    - key: readability-identifier-naming.ProtectedMethodPrefix
-      value: ''
-    - key: readability-identifier-naming.ParameterPackCase
-      value: lower_case
-    - key: readability-identifier-naming.StructCase
-      value: CamelCase
-    - key: readability-identifier-naming.TemplateTemplateParameterCase
-      value: CamelCase
-    - key: readability-identifier-naming.TemplateUsingCase
-      value: lower_case
-    - key: readability-identifier-naming.TypeTemplateParameterCase
-      value: CamelCase
-    - key: readability-identifier-naming.TypedefCase
-      value: CamelCase
-    - key: readability-identifier-naming.UnionCase
-      value: CamelCase
-    - key: readability-identifier-naming.UsingCase
-      value: CamelCase
-    - key: modernize-loop-convert.UseCxx20ReverseRanges
-      value: false
-    - key: performance-move-const-arg.CheckTriviallyCopyableMove
-      value: false
-    # Workaround clang-tidy bug: https://github.com/llvm/llvm-project/issues/46097
-    - key: readability-identifier-naming.TypeTemplateParameterIgnoredRegexp
-      value: expr-type
+    readability-identifier-naming.ClassCase: CamelCase
+    readability-identifier-naming.EnumCase: CamelCase
+    readability-identifier-naming.LocalVariableCase: lower_case
+    readability-identifier-naming.StaticConstantCase: aNy_CasE
+    readability-identifier-naming.MemberCase: lower_case
+    readability-identifier-naming.PrivateMemberPrefix: ''
+    readability-identifier-naming.ProtectedMemberPrefix: ''
+    readability-identifier-naming.PublicMemberCase: lower_case
+    readability-identifier-naming.MethodCase: camelBack
+    readability-identifier-naming.PrivateMethodPrefix: ''
+    readability-identifier-naming.ProtectedMethodPrefix: ''
+    readability-identifier-naming.ParameterPackCase: lower_case
+    readability-identifier-naming.StructCase: CamelCase
+    readability-identifier-naming.TemplateTemplateParameterCase: CamelCase
+    readability-identifier-naming.TemplateParameterCase: lower_case
+    readability-identifier-naming.TypeTemplateParameterCase: CamelCase
+    readability-identifier-naming.TypedefCase: CamelCase
+    readability-identifier-naming.UnionCase: CamelCase
+    modernize-loop-convert.UseCxx20ReverseRanges: false
+    performance-move-const-arg.CheckTriviallyCopyableMove: false
+    # Workaround clang-tidy bug: https://github.com/llvm/llvm-project/issues/46097
+    readability-identifier-naming.TypeTemplateParameterIgnoredRegexp: expr-type
+    cppcoreguidelines-avoid-do-while.IgnoreMacros: true
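The header comment in the new .clang-tidy describes how the checks are meant to be run; below is a minimal sketch of that invocation, where the -DENABLE_CLANG_TIDY=1 flag and the "-k0" Ninja option come from the comment itself, while the build directory and the `clickhouse` target name are illustrative assumptions:

```bash
# Configure a build with clang-tidy enabled, as suggested by the .clang-tidy header comment.
mkdir -p build && cd build
cmake .. -DENABLE_CLANG_TIDY=1
# -k0 keeps Ninja going after the first warning-as-error, so all clang-tidy findings are shown.
ninja -k0 clickhouse
```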
.clangd (new file, 16 lines)
Diagnostics:
  # clangd does parse .clang-tidy, but some checks are too slow to run in
  # clang-tidy build, so let's enable them explicitly for clangd at least.
  ClangTidy:
    # The following checks had been disabled due to slowliness with C++23,
    # for more details see [1].
    #
    # [1]: https://github.com/llvm/llvm-project/issues/61418
    #
    # But the code base had been written in a style that had been checked
    # by this check, so at least, let's enable it for clangd.
    Add: [
      # configured in .clang-tidy
      readability-identifier-naming,
      bugprone-reserved-identifier,
    ]
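Since clangd picks this file up automatically, one way to sanity-check the configuration outside an editor is clangd's standalone check mode; the source file path below is only an example:

```bash
# Parse a single translation unit with the repository's .clangd/.clang-tidy settings
# and print the diagnostics clangd would show in the editor.
clangd --check=src/Common/Exception.cpp
```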
.exrc (new file, 1 line)
au BufRead,BufNewFile * set tabstop=4 softtabstop=0 expandtab shiftwidth=4 smarttab tags=tags,../tags
.git-blame-ignore-revs (new file, 15 lines)
# This is a file that can be used by git-blame to ignore some revisions.
# (git 2.23+, released in August 2019)
#
# Can be configured as follow:
#
# $ git config blame.ignoreRevsFile .git-blame-ignore-revs
#
# For more information you can look at git-blame(1) man page.

# Changed tabs to spaces in code [#CLICKHOUSE-3]
137ad95929ee016cc6d3c03bccb5586941c163ff

# dbms/ → src/
# (though it is unlikely that you will see it in blame)
06446b4f08a142d6f1bc30664c47ded88ab51782
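A quick sketch of how this file is used locally, combining the command quoted in the file with an ordinary blame run; the blamed path is only an illustrative example:

```bash
# Point git-blame at the ignore list shipped in the repository (requires git 2.23+).
git config blame.ignoreRevsFile .git-blame-ignore-revs
# Blame now skips the listed formatting-only commits and attributes lines to the real authors.
git blame src/Core/Settings.h
```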
.gitattributes (vendored; 1 changed line)
@@ -1,3 +1,4 @@
contrib/* linguist-vendored
*.h linguist-language=C++
tests/queries/0_stateless/data_json/* binary
+tests/queries/0_stateless/*.reference -crlf
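To verify which attributes the updated rules assign to a given path, git can be queried directly; the file name below is only an example:

```bash
# List every attribute that .gitattributes applies to a test reference file.
git check-attr --all tests/queries/0_stateless/00001_select_1.reference
```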
.github/ISSUE_TEMPLATE/10_question.md (vendored; 2 changed lines)
@@ -7,6 +7,6 @@ assignees: ''

---

-> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in Telegram chat https://telegram.me/clickhouse_en is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
+> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in [community Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-1gh9ds7f4-PgDhJAaF8ad5RbWBAAjzFg) is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse

> If you still prefer GitHub issues, remove all this text and ask your question here.
.github/ISSUE_TEMPLATE/85_bug-report.md (vendored; 9 changed lines)
@@ -1,26 +1,29 @@
---
name: Bug report
-about: Wrong behaviour (visible to users) in official ClickHouse release.
+about: Wrong behavior (visible to users) in the official ClickHouse release.
title: ''
labels: 'potential bug'
assignees: ''

---

+> Please make sure that the version you're using is still supported (you can find the list [here](https://github.com/ClickHouse/ClickHouse/blob/master/SECURITY.md#scope-and-supported-versions)).
+
> You have to provide the following information whenever possible.

**Describe what's wrong**

> A clear and concise description of what works not as it is supposed to.

+> A link to reproducer in [https://fiddle.clickhouse.com/](https://fiddle.clickhouse.com/).
+
**Does it reproduce on recent release?**

[The list of releases](https://github.com/ClickHouse/ClickHouse/blob/master/utils/list-versions/version_date.tsv)

**Enable crash reporting**

-> If possible, change "enabled" to true in "send_crash_reports" section in `config.xml`:
+> Change "enabled" to true in "send_crash_reports" section in `config.xml`:

```
<send_crash_reports>
    <!-- Changing <enabled> to true allows sending crash reports to -->
.github/ISSUE_TEMPLATE/96_installation-issues.md (vendored; new file, 31 lines)
---
name: Installation issue
about: Issue with ClickHouse installation from https://clickhouse.com/docs/en/install/
title: ''
labels: comp-install
assignees: ''

---

**I have tried the following solutions**: https://clickhouse.com/docs/en/faq/troubleshooting/#troubleshooting-installation-errors

**Installation type**

Packages, docker, single binary, curl?

**Source of the ClickHouse**

A link to the source. Or the command you've tried

**Expected result**

What you expected

**The actual result**

What you get

**How to reproduce**

* For Linux-based operating systems: provide a script for clear docker container from the official image
* For anything else: steps to reproduce on as much as possible clear system
.github/PULL_REQUEST_TEMPLATE.md (vendored; 27 changed lines)
@@ -1,16 +1,41 @@
+<!---
+A technical comment, you are free to remove or leave it as it is when PR is created
+The following categories are used in the next scripts, update them accordingly
+utils/changelog/changelog.py
+tests/ci/cancel_and_rerun_workflow_lambda/app.py
+-->
### Changelog category (leave one):
- New Feature
- Improvement
-- Bug Fix (user-visible misbehaviour in official stable or prestable release)
- Performance Improvement
- Backward Incompatible Change
- Build/Testing/Packaging Improvement
- Documentation (changelog entry is not required)
+- Bug Fix (user-visible misbehavior in an official stable release)
- Not for changelog (changelog entry is not required)


### Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md):
...

+### Documentation entry for user-facing changes
+
+- [ ] Documentation is written (mandatory for new features)
+
+<!---
+Directly edit documentation source files in the "docs" folder with the same pull-request as code changes
+
+or
+
+Add a user-readable short description of the changes that should be added to docs.clickhouse.com below.
+
+At a minimum, the following information should be added (but add more as needed).
+- Motivation: Why is this function, table engine, etc. useful to ClickHouse users?
+
+- Parameters: If the feature being added takes arguments, options or is influenced by settings, please list them below with a brief explanation.
+
+- Example use: A query or command.
+-->
+
> Information about CI checks: https://clickhouse.com/docs/en/development/continuous-integration/
.github/actions/clean/action.yml (vendored; new file, 11 lines)
name: Clean runner
description: Clean the runner's temp path on ending
runs:
  using: "composite"
  steps:
    - name: Clean
      shell: bash
      run: |
        docker ps --quiet | xargs --no-run-if-empty docker kill ||:
        docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
        sudo rm -fr "${{runner.temp}}"
.github/actions/common_setup/action.yml (vendored; new file, 35 lines)
name: Common setup
description: Setup necessary environments
inputs:
  job_type:
    description: the name to use in the TEMP_PATH and REPO_COPY
    default: common
    type: string
  nested_job:
    description: the fuse for unintended use inside of the reusable callable jobs
    default: true
    type: boolean
runs:
  using: "composite"
  steps:
    - name: Setup and check ENV
      shell: bash
      run: |
        echo "Setup the common ENV variables"
        cat >> "$GITHUB_ENV" << 'EOF'
        TEMP_PATH=${{runner.temp}}/${{inputs.job_type}}
        REPO_COPY=${{runner.temp}}/${{inputs.job_type}}/git-repo-copy
        IMAGES_PATH=${{runner.temp}}/images_path
        REPORTS_PATH=${{runner.temp}}/reports_dir
        EOF
        if [ -z "${{env.GITHUB_JOB_OVERRIDDEN}}" ] && [ "true" == "${{inputs.nested_job}}" ]; then
          echo "The GITHUB_JOB_OVERRIDDEN ENV is unset, and must be set for the nested jobs"
          exit 1
        fi
    - name: Setup $TEMP_PATH
      shell: bash
      run: |
        # to remove every leftovers
        sudo rm -fr "$TEMP_PATH"
        mkdir -p "$REPO_COPY"
        cp -a "$GITHUB_WORKSPACE"/. "$REPO_COPY"/
.github/workflows/auto_release.yml (vendored; new file, 45 lines)
name: AutoRelease

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: auto-release
on:  # yamllint disable-line rule:truthy
  # schedule:
  #   - cron: '0 10-16 * * 1-5'
  workflow_dispatch:

jobs:
  CherryPick:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/cherry_pick
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          REPO_OWNER=ClickHouse
          REPO_NAME=ClickHouse
          REPO_TEAM=core
          EOF
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Auto-release
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 auto_release.py --release-after-days=3
      - name: Cleanup
        if: always()
        run: |
          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH"
.github/workflows/backport.yml (vendored; deleted, 43 lines)
name: CherryPick

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: cherry-pick
on:  # yamllint disable-line rule:truthy
  schedule:
    - cron: '0 */3 * * *'
jobs:
  CherryPick:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/cherry_pick
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          REPO_OWNER=ClickHouse
          REPO_NAME=ClickHouse
          REPO_TEAM=core
          EOF
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Cherry pick
        run: |
          sudo pip install GitPython
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 cherry_pick.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
.github/workflows/backport_branches.yml (vendored; 633 changed lines)
@@ -1,3 +1,4 @@
+# yamllint disable rule:comments-indentation
name: BackportPR

env:
@@ -9,68 +10,87 @@ on: # yamllint disable-line rule:truthy
    branches:
      - 'backport/**'
jobs:
+  CheckLabels:
+    runs-on: [self-hosted, style-checker]
+    # Run the first check always, even if the CI is cancelled
+    if: ${{ always() }}
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Labels check
+        run: |
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 run_check.py
  PythonUnitTests:
    runs-on: [self-hosted, style-checker]
+    needs: CheckLabels
    steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
      - name: Python unit tests
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 -m unittest discover -s . -p '*_test.py'
+          echo "Testing the main ci directory"
+          python3 -m unittest discover -s . -p 'test_*.py'
+          for dir in *_lambda/; do
+            echo "Testing $dir"
+            python3 -m unittest discover -s "$dir" -p 'test_*.py'
+          done
  DockerHubPushAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
+    needs: CheckLabels
    steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
+    needs: CheckLabels
    steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
-    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
+    needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests]
    runs-on: [self-hosted, style-checker]
    steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags
+          filter: tree:0
      - name: Download changed aarch64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
@@ -79,481 +99,230 @@
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
-  CompatibilityCheck:
+  CompatibilityCheckX86:
    needs: [BuilderDebRelease]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/compatibility_check
-          REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          EOF
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Download json reports
-        uses: actions/download-artifact@v2
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: CompatibilityCheck
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Compatibility check X86
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
+  CompatibilityCheckAarch64:
+    needs: [BuilderDebAarch64]
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Compatibility check X86
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
  BuilderDebRelease:
    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          BUILD_NAME=package_release
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Build
-        run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: package_release
+      checkout_depth: 0
  BuilderDebAarch64:
    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          BUILD_NAME=package_aarch64
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Build
-        run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: package_aarch64
+      checkout_depth: 0
  BuilderDebAsan:
    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          BUILD_NAME=package_asan
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Build
-        run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: package_asan
  BuilderDebTsan:
    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          BUILD_NAME=package_tsan
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Build
-        run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: package_tsan
  BuilderDebDebug:
    needs: [DockerHubPush]
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          BUILD_NAME=package_debug
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Build
-        run: |
-          git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-          git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v2
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: package_debug
+  BuilderBinDarwin:
+    needs: [DockerHubPush]
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: binary_darwin
+      checkout_depth: 0
+  BuilderBinDarwinAarch64:
+    needs: [DockerHubPush]
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: binary_darwin_aarch64
+      checkout_depth: 0
+  ############################################################################################
+  ##################################### Docker images #######################################
+  ############################################################################################
+  DockerServerImages:
+    needs:
+      - BuilderDebRelease
+      - BuilderDebAarch64
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
+          filter: tree:0
+      - name: Check docker clickhouse/clickhouse-server building
+        run: |
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 docker_server.py --release-type head --no-push \
+            --image-repo clickhouse/clickhouse-server --image-path docker/server
+          python3 docker_server.py --release-type head --no-push \
+            --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
  BuilderReport:
+    if: ${{ success() || failure() }}
    needs:
      - BuilderDebRelease
      - BuilderDebAarch64
      - BuilderDebAsan
      - BuilderDebTsan
      - BuilderDebDebug
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          DEPENDENCIES=$(cat << 'EOF' | jq '. | length'
-          ${{ toJSON(needs) }}
-          EOF
-          )
-          echo "DEPENDENCIES=$DEPENDENCIES" >> "$GITHUB_ENV"
-          cat >> "$GITHUB_ENV" << 'EOF'
-          CHECK_NAME=ClickHouse build check (actions)
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          TEMP_PATH=${{runner.temp}}/report_check
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v2
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Report Builder
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 build_report_check.py "$CHECK_NAME" "$DEPENDENCIES"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: ClickHouse build check
+      runner_type: style-checker
+      additional_envs: |
+        NEEDS_DATA<<NDENV
+        ${{ toJSON(needs) }}
+        NDENV
+      run_command: |
+        cd "$GITHUB_WORKSPACE/tests/ci"
+        python3 build_report_check.py "$CHECK_NAME"
+  BuilderSpecialReport:
+    if: ${{ success() || failure() }}
+    needs:
+      - BuilderBinDarwin
+      - BuilderBinDarwinAarch64
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: ClickHouse special build check
+      runner_type: style-checker
+      additional_envs: |
+        NEEDS_DATA<<NDENV
+        ${{ toJSON(needs) }}
+        NDENV
+      run_command: |
+        cd "$GITHUB_WORKSPACE/tests/ci"
+        python3 build_report_check.py "$CHECK_NAME"
+  ############################################################################################
+  #################################### INSTALL PACKAGES ######################################
+  ############################################################################################
+  InstallPackagesTestRelease:
+    needs: [BuilderDebRelease]
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Install packages (amd64)
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 install_check.py "$CHECK_NAME"
+  InstallPackagesTestAarch64:
+    needs: [BuilderDebAarch64]
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Install packages (arm64)
+      runner_type: style-checker-aarch64
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 install_check.py "$CHECK_NAME"
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
  FunctionalStatelessTestAsan:
    needs: [BuilderDebAsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (address, actions)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v2
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Stateless tests (asan)
+      runner_type: func-tester
+      additional_envs: |
+        KILL_TIMEOUT=10800
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
##############################################################################################
  FunctionalStatefulTestDebug:
    needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (debug, actions)
-          REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v2
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Stateful tests (debug)
+      runner_type: func-tester
+      additional_envs: |
+        KILL_TIMEOUT=3600
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
|
|
||||||
- name: Cleanup
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
docker kill "$(docker ps -q)" ||:
|
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
######################################### STRESS TESTS #######################################
|
######################################### STRESS TESTS #######################################
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
StressTestTsan:
|
StressTestTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
# func testers have 16 cores + 128 GB memory
|
uses: ./.github/workflows/reusable_test.yml
|
||||||
# while stress testers have 36 cores + 72 memory
|
with:
|
||||||
# It would be better to have something like 32 + 128,
|
test_name: Stress test (tsan)
|
||||||
# but such servers almost unavailable as spot instances.
|
runner_type: stress-tester
|
||||||
runs-on: [self-hosted, func-tester]
|
run_command: |
|
||||||
steps:
|
cd "$REPO_COPY/tests/ci"
|
||||||
- name: Set envs
|
python3 stress_check.py "$CHECK_NAME"
|
||||||
run: |
|
|
||||||
cat >> "$GITHUB_ENV" << 'EOF'
|
|
||||||
TEMP_PATH=${{runner.temp}}/stress_thread
|
|
||||||
REPORTS_PATH=${{runner.temp}}/reports_dir
|
|
||||||
CHECK_NAME=Stress test (thread, actions)
|
|
||||||
REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
|
|
||||||
EOF
|
|
||||||
- name: Download json reports
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
path: ${{ env.REPORTS_PATH }}
|
|
||||||
- name: Clear repository
|
|
||||||
run: |
|
|
||||||
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
|
|
||||||
- name: Check out repository code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: Stress test
|
|
||||||
run: |
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
|
||||||
mkdir -p "$TEMP_PATH"
|
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
|
||||||
cd "$REPO_COPY/tests/ci"
|
|
||||||
python3 stress_check.py "$CHECK_NAME"
|
|
||||||
- name: Cleanup
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
docker kill "$(docker ps -q)" ||:
|
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
############################# INTEGRATION TESTS #############################################
|
############################# INTEGRATION TESTS #############################################
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
IntegrationTestsRelease:
|
IntegrationTestsRelease:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
runs-on: [self-hosted, stress-tester]
|
uses: ./.github/workflows/reusable_test.yml
|
||||||
steps:
|
with:
|
||||||
- name: Set envs
|
test_name: Integration tests (release)
|
||||||
run: |
|
runner_type: stress-tester
|
||||||
cat >> "$GITHUB_ENV" << 'EOF'
|
batches: 4
|
||||||
TEMP_PATH=${{runner.temp}}/integration_tests_release
|
run_command: |
|
||||||
REPORTS_PATH=${{runner.temp}}/reports_dir
|
cd "$REPO_COPY/tests/ci"
|
||||||
CHECK_NAME=Integration tests (release, actions)
|
python3 integration_test_check.py "$CHECK_NAME"
|
||||||
REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
|
|
||||||
EOF
|
|
||||||
- name: Download json reports
|
|
||||||
uses: actions/download-artifact@v2
|
|
||||||
with:
|
|
||||||
path: ${{ env.REPORTS_PATH }}
|
|
||||||
- name: Clear repository
|
|
||||||
run: |
|
|
||||||
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
|
|
||||||
- name: Check out repository code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
- name: Integration test
|
|
||||||
run: |
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
|
||||||
mkdir -p "$TEMP_PATH"
|
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
|
||||||
cd "$REPO_COPY/tests/ci"
|
|
||||||
python3 integration_test_check.py "$CHECK_NAME"
|
|
||||||
- name: Cleanup
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
docker kill "$(docker ps -q)" ||:
|
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
- DockerHubPush
|
- DockerHubPush
|
||||||
|
- DockerServerImages
|
||||||
- BuilderReport
|
- BuilderReport
|
||||||
|
- BuilderSpecialReport
|
||||||
- FunctionalStatelessTestAsan
|
- FunctionalStatelessTestAsan
|
||||||
- FunctionalStatefulTestDebug
|
- FunctionalStatefulTestDebug
|
||||||
- StressTestTsan
|
- StressTestTsan
|
||||||
- IntegrationTestsRelease
|
- IntegrationTestsRelease
|
||||||
- CompatibilityCheck
|
- CompatibilityCheckX86
|
||||||
|
- CompatibilityCheckAarch64
|
||||||
runs-on: [self-hosted, style-checker]
|
runs-on: [self-hosted, style-checker]
|
||||||
steps:
|
steps:
|
||||||
- name: Clear repository
|
|
||||||
run: |
|
|
||||||
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
|
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
uses: actions/checkout@v2
|
uses: ClickHouse/checkout@v1
|
||||||
|
with:
|
||||||
|
clear-repository: true
|
||||||
- name: Finish label
|
- name: Finish label
|
||||||
run: |
|
run: |
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
python3 finish_check.py
|
python3 finish_check.py
|
||||||
|
python3 merge_pr.py
|
||||||
|
2  .github/workflows/cancel.yml
@@ -6,7 +6,7 @@ env:

 on:  # yamllint disable-line rule:truthy
   workflow_run:
-    workflows: ["PullRequestCI", "ReleaseCI", "DocsCheck", "BackportPR"]
+    workflows: ["PullRequestCI", "ReleaseBranchCI", "DocsCheck", "BackportPR"]
     types:
       - requested
 jobs:
45  .github/workflows/cherry_pick.yml  (new file)
name: CherryPick

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: cherry-pick
on:  # yamllint disable-line rule:truthy
  schedule:
    - cron: '0 * * * *'
  workflow_dispatch:

jobs:
  CherryPick:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/cherry_pick
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          REPO_OWNER=ClickHouse
          REPO_NAME=ClickHouse
          REPO_TEAM=core
          EOF
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Cherry pick
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 cherry_pick.py
      - name: Cleanup
        if: always()
        run: |
          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH"
79  .github/workflows/codeql.yml  (deleted)
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

"on":
  # push:
  #   branches: [ master ]
  # pull_request:
  #   # The branches below must be a subset of the branches above
  #   branches: [ master ]
  schedule:
    - cron: '0 */6 * * *'
  workflow_dispatch:

env:
  CC: clang-14
  CXX: clang++-14

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: ['cpp']
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: 'true'

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      # - name: Autobuild
      #   uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      - name: Build
        run: |
          sudo apt-get install -yq git cmake python ninja-build
          sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
          mkdir build
          cd build
          cmake -DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1 ..
          ninja
          rm -rf ../contrib

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
4  .github/workflows/debug.yml
@@ -2,10 +2,10 @@
 name: Debug

 'on':
-  [push, pull_request, release, workflow_dispatch]
+  [push, pull_request, pull_request_review, release, workflow_dispatch, workflow_call]

 jobs:
   DebugInfo:
     runs-on: ubuntu-latest
     steps:
-      - uses: hmarr/debug-action@1201a20fc9d278ddddd5f0f46922d06513892491
+      - uses: hmarr/debug-action@a701ed95a46e6f2fb0df25e1a558c16356fae35a
111  .github/workflows/docs_check.yml
@@ -4,7 +4,7 @@ env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1

 on:  # yamllint disable-line rule:truthy
   pull_request:
     types:
       - synchronize
@@ -13,17 +13,20 @@ on:  # yamllint disable-line rule:truthy
     branches:
       - master
     paths:
+      - '**.md'
+      - 'docker/docs/**'
       - 'docs/**'
-      - 'website/**'
+      - 'utils/check-style/aspell-ignore/**'
+      - 'tests/ci/docs_check.py'
+      - '.github/workflows/docs_check.yml'
 jobs:
   CheckLabels:
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
       - name: Labels check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
@@ -32,17 +35,16 @@ jobs:
     needs: CheckLabels
     runs-on: [self-hosted, style-checker-aarch64]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
       - name: Images check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_images_check.py --suffix aarch64
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
           name: changed_images_aarch64
           path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
@@ -50,17 +52,16 @@ jobs:
     needs: CheckLabels
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
       - name: Images check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_images_check.py --suffix amd64
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: changed_images_amd64
           path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -68,18 +69,19 @@ jobs:
     needs: [DockerHubPushAmd64, DockerHubPushAarch64]
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          fetch-depth: 0  # to find ancestor merge commits necessary for finding proper docker tags
+          filter: tree:0
       - name: Download changed aarch64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: changed_images_aarch64
           path: ${{ runner.temp }}
       - name: Download changed amd64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: changed_images_amd64
           path: ${{ runner.temp }}
@@ -88,38 +90,49 @@ jobs:
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: changed_images
           path: ${{ runner.temp }}/changed_images.json
+  StyleCheck:
+    needs: DockerHubPush
+    # We need additional `&& ! cancelled()` to have the job being able to cancel
+    if: ${{ success() || failure() || ( always() && ! cancelled() ) }}
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Style check
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 style_check.py
+    secrets:
+      secret_envs: |
+        ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
+        ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
+        RCSK
   DocsCheck:
     needs: DockerHubPush
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/docs_check
-          REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.TEMP_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Docs Check
-        run: |
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 docs_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Docs check
+      runner_type: func-tester-aarch64
+      additional_envs: |
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 docs_check.py
+  FinishCheck:
+    needs:
+      - StyleCheck
+      - DockerHubPush
+      - DocsCheck
+    runs-on: [self-hosted, style-checker]
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: Finish label
+        run: |
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 finish_check.py
+          python3 merge_pr.py --check-approved
121  .github/workflows/docs_release.yml  (deleted)
name: DocsReleaseChecks

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: master-release
  cancel-in-progress: true
on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
      - 'benchmark/**'
      - 'docker/**'
      - '.github/**'
  workflow_dispatch:
jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed aarch64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
  DocsRelease:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/docs_release
          REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
          CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
      - name: Docs Release
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 docs_release.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
49  .github/workflows/jepsen.yml
@@ -7,38 +7,23 @@ concurrency:
 on:  # yamllint disable-line rule:truthy
   schedule:
     - cron: '0 */6 * * *'
-  workflow_run:
-    workflows: ["PullRequestCI"]
-    types:
-      - completed
   workflow_dispatch:
+  workflow_call:
 jobs:
   KeeperJepsenRelease:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/keeper_jepsen
-          REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
-          EOF
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-      - name: Jepsen Test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 keeper_jepsen_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Jepsen keeper check
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 jepsen_check.py keeper
+  # ServerJepsenRelease:
+  #   runs-on: [self-hosted, style-checker]
+  #   uses: ./.github/workflows/reusable_test.yml
+  #   with:
+  #     test_name: Jepsen server check
+  #     runner_type: style-checker
+  #     run_command: |
+  #       cd "$REPO_COPY/tests/ci"
+  #       python3 jepsen_check.py server
26  .github/workflows/libfuzzer.yml  (new file)
name: libFuzzer

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

on:  # yamllint disable-line rule:truthy
  # schedule:
  #   - cron: '0 0 2 31 1' # never for now
  workflow_call:
jobs:
  BuilderFuzzers:
    uses: ./.github/workflows/reusable_build.yml
    with:
      build_name: fuzzers
  libFuzzerTest:
    needs: [BuilderFuzzers]
    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: libFuzzer tests
      runner_type: func-tester
      additional_envs: |
        KILL_TIMEOUT=10800
      run_command: |
        cd "$REPO_COPY/tests/ci"
        python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
3351  .github/workflows/master.yml  (file diff suppressed because it is too large)
130  .github/workflows/nightly.yml
@@ -10,37 +10,38 @@ env:
   workflow_dispatch:

 jobs:
+  Debug:
+    # The task for having a preserved ENV and event.json for later investigation
+    uses: ./.github/workflows/debug.yml
   DockerHubPushAarch64:
     runs-on: [self-hosted, style-checker-aarch64]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
       - name: Images check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_images_check.py --suffix aarch64 --all
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: changed_images_aarch64
           path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
   DockerHubPushAmd64:
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
       - name: Images check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_images_check.py --suffix amd64 --all
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: changed_images_amd64
           path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -48,18 +49,19 @@ jobs:
     needs: [DockerHubPushAmd64, DockerHubPushAarch64]
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          fetch-depth: 0  # to find ancestor merge commits necessary for finding proper docker tags
+          filter: tree:0
       - name: Download changed aarch64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: changed_images_aarch64
           path: ${{ runner.temp }}
       - name: Download changed amd64 images
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: changed_images_amd64
           path: ${{ runner.temp }}
@@ -68,56 +70,66 @@ jobs:
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
       - name: Upload images files to artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: changed_images
           path: ${{ runner.temp }}/changed_images.json
-  BuilderCoverity:
-    needs: DockerHubPush
+  SonarCloud:
     runs-on: [self-hosted, builder]
+    env:
+      SONAR_SCANNER_VERSION: 4.8.0.2856
+      SONAR_SERVER_URL: "https://sonarcloud.io"
+      BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed
+      CC: clang-17
+      CXX: clang++-17
     steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          BUILD_NAME=coverity
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          CHECK_NAME=ClickHouse build check (actions)
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          TEMP_PATH=${{runner.temp}}/build_check
-          EOF
-          echo "COVERITY_TOKEN=${{ secrets.COVERITY_TOKEN }}" >> "$GITHUB_ENV"
-      - name: Download changed images
-        uses: actions/download-artifact@v2
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
-        id: coverity-checkout
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
         with:
-          submodules: 'true'
-      - name: Build
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
-      - name: Upload Coverity Analysis
-        if: ${{ success() || failure() }}
-        run: |
-          curl --form token="${COVERITY_TOKEN}" \
-            --form email='security+coverity@clickhouse.com' \
-            --form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
-            --form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
-            --form description="Nighly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
-            https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+          clear-repository: true
+          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
+          filter: tree:0
+          submodules: true
+      - name: Set up JDK 11
+        uses: actions/setup-java@v1
+        with:
+          java-version: 11
+      - name: Download and set up sonar-scanner
+        env:
+          SONAR_SCANNER_DOWNLOAD_URL: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${{ env.SONAR_SCANNER_VERSION }}-linux.zip
+        run: |
+          mkdir -p "$HOME/.sonar"
+          curl -sSLo "$HOME/.sonar/sonar-scanner.zip" "${{ env.SONAR_SCANNER_DOWNLOAD_URL }}"
+          unzip -o "$HOME/.sonar/sonar-scanner.zip" -d "$HOME/.sonar/"
+          echo "$HOME/.sonar/sonar-scanner-${{ env.SONAR_SCANNER_VERSION }}-linux/bin" >> "$GITHUB_PATH"
+      - name: Download and set up build-wrapper
+        env:
+          BUILD_WRAPPER_DOWNLOAD_URL: ${{ env.SONAR_SERVER_URL }}/static/cpp/build-wrapper-linux-x86.zip
+        run: |
+          curl -sSLo "$HOME/.sonar/build-wrapper-linux-x86.zip" "${{ env.BUILD_WRAPPER_DOWNLOAD_URL }}"
+          unzip -o "$HOME/.sonar/build-wrapper-linux-x86.zip" -d "$HOME/.sonar/"
+          echo "$HOME/.sonar/build-wrapper-linux-x86" >> "$GITHUB_PATH"
+      - name: Set Up Build Tools
+        run: |
+          sudo apt-get update
+          sudo apt-get install -yq git cmake ccache ninja-build python3 yasm nasm
+          sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
+      - name: Run build-wrapper
+        run: |
+          mkdir build
+          cd build
+          cmake ..
+          cd ..
+          build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/
+      - name: Run sonar-scanner
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+        run: |
+          sonar-scanner \
+            --define sonar.host.url="${{ env.SONAR_SERVER_URL }}" \
+            --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \
+            --define sonar.projectKey="ClickHouse_ClickHouse" \
+            --define sonar.organization="clickhouse-java" \
+            --define sonar.cfamily.cpp23.enabled=true \
+            --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql"
.github/workflows/pull_request.yml
vendored
3782
.github/workflows/pull_request.yml
vendored
File diff suppressed because it is too large
Load Diff
23
.github/workflows/pull_request_approved.yml
vendored
Normal file
23
.github/workflows/pull_request_approved.yml
vendored
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
name: PullRequestApprovedCI
|
||||||
|
|
||||||
|
env:
|
||||||
|
# Force the stdout and stderr streams to be unbuffered
|
||||||
|
PYTHONUNBUFFERED: 1
|
||||||
|
|
||||||
|
on: # yamllint disable-line rule:truthy
|
||||||
|
pull_request_review:
|
||||||
|
types:
|
||||||
|
- submitted
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
MergeOnApproval:
|
||||||
|
runs-on: [self-hosted, style-checker]
|
||||||
|
steps:
|
||||||
|
- name: Check out repository code
|
||||||
|
uses: ClickHouse/checkout@v1
|
||||||
|
with:
|
||||||
|
clear-repository: true
|
||||||
|
- name: Merge approved PR
|
||||||
|
run: |
|
||||||
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
|
python3 merge_pr.py --check-approved
|
63  .github/workflows/release.yml
@@ -1,4 +1,4 @@
-name: ReleaseWorkflow
+name: PublishedReleaseCI
 # - Gets artifacts from S3
 # - Sends it to JFROG Artifactory
 # - Adds them to the release assets
@@ -7,57 +7,60 @@ on:  # yamllint disable-line rule:truthy
   release:
     types:
       - published
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'Release tag'
+        required: true
+        type: string

 jobs:
   ReleasePublish:
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          JFROG_API_KEY=${{ secrets.JFROG_KEY_API_PACKAGES }}
-          TEMP_PATH=${{runner.temp}}/release_packages
-          REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
-          EOF
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Download packages and push to Artifactory
-        run: |
-          rm -rf "$TEMP_PATH" && mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY"
-          python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \
-            --commit '${{ github.sha }}' --all
-      - name: Upload packages to release assets
-        uses: svenstaro/upload-release-action@v2
-        with:
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          file: ${{runner.temp}}/push_to_artifactory/*
-          overwrite: true
-          tag: ${{ github.ref }}
-          file_glob: true
+      - name: Set tag from input
+        if: github.event_name == 'workflow_dispatch'
+        run: |
+          echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV"
+      - name: Set tag from REF
+        if: github.event_name == 'release'
+        run: |
+          echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
+      - name: Deploy packages and assets
+        run: |
+          curl --silent --data '' --no-buffer \
+            '${{ secrets.PACKAGES_RELEASE_URL }}/release/'"${GITHUB_TAG}"'?binary=binary_darwin&binary=binary_darwin_aarch64&sync=true'
   ############################################################################################
   ##################################### Docker images #######################################
   ############################################################################################
   DockerServerImages:
     runs-on: [self-hosted, style-checker]
     steps:
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+      - name: Set tag from input
+        if: github.event_name == 'workflow_dispatch'
+        run: |
+          echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV"
+      - name: Set tag from REF
+        if: github.event_name == 'release'
+        run: |
+          echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
       - name: Check out repository code
-        uses: actions/checkout@v2
+        uses: ClickHouse/checkout@v1
         with:
+          clear-repository: true
           fetch-depth: 0  # otherwise we will have no version info
+          filter: tree:0
+          ref: ${{ env.GITHUB_TAG }}
       - name: Check docker clickhouse/clickhouse-server building
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 docker_server.py --release-type auto --version "${{ github.ref }}"
-          python3 docker_server.py --release-type auto --version "${{ github.ref }}" --no-ubuntu \
+          python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \
+            --image-repo clickhouse/clickhouse-server --image-path docker/server
+          python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \
             --image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
       - name: Cleanup
         if: always()
         run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH"
1983  .github/workflows/release_branches.yml  (file diff suppressed because it is too large)
79  .github/workflows/reusable_build.yml  (new file)
### For the pure soul wishes to move it to another place
# https://github.com/orgs/community/discussions/9050

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

name: Build ClickHouse
'on':
  workflow_call:
    inputs:
      build_name:
        description: the value of build type from tests/ci/ci_config.py
        required: true
        type: string
      checkout_depth:
        description: the value of the git shallow checkout
        required: false
        type: number
        default: 1
      runner_type:
        description: the label of runner to use
        default: builder
        type: string
      additional_envs:
        description: additional ENV variables to setup the job
        type: string

jobs:
  Build:
    name: Build-${{inputs.build_name}}
    env:
      GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}
    runs-on: [self-hosted, '${{inputs.runner_type}}']
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          submodules: true
          fetch-depth: ${{inputs.checkout_depth}}
          filter: tree:0
      - name: Set build envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          ${{inputs.additional_envs}}
          EOF
          python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV"
      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
        # This step is done in GITHUB_WORKSPACE,
        # because it's broken in REPO_COPY for some reason
        if: ${{ env.BUILD_SPARSE_CHECKOUT == 'true' }}
        run: |
          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
          du -hs "$GITHUB_WORKSPACE/contrib" ||:
          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
      - name: Common setup
        uses: ./.github/actions/common_setup
        with:
          job_type: build_check
      - name: Download changed images
        uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
      - name: Build
        run: |
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
      - name: Clean
        if: always()
        uses: ./.github/actions/clean
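For orientation, a minimal caller sketch of this reusable build workflow (the job name and checkout depth below are illustrative assumptions; real callers are the master, pull_request, and libfuzzer workflows touched by this commit, and the build name must exist in tests/ci/ci_config.py):

# Hypothetical caller job; mirrors the pattern used by libfuzzer.yml above.
jobs:
  BuilderDebRelease:
    uses: ./.github/workflows/reusable_build.yml
    with:
      build_name: package_release   # must be a build name defined in tests/ci/ci_config.py
      checkout_depth: 0             # assumption: release-style builds want full git history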
113  .github/workflows/reusable_test.yml  (new file)
### For the pure soul wishes to move it to another place
# https://github.com/orgs/community/discussions/9050

name: Testing workflow
'on':
  workflow_call:
    inputs:
      test_name:
        description: the value of test type from tests/ci/ci_config.py, ends up as $CHECK_NAME ENV
        required: true
        type: string
      runner_type:
        description: the label of runner to use
        required: true
        type: string
      run_command:
        description: the command to launch the check. Usually starts with `cd '$REPO_COPY/tests/ci'`
        required: true
        type: string
      batches:
        description: how many batches for the test will be launched
        default: 1
        type: number
      checkout_depth:
        description: the value of the git shallow checkout
        required: false
        type: number
        default: 1
      submodules:
        description: if the submodules should be checked out
        required: false
        type: boolean
        default: false
      additional_envs:
        description: additional ENV variables to setup the job
        type: string
    secrets:
      secret_envs:
        description: if given, it's passed to the environments
        required: false

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1
  CHECK_NAME: ${{inputs.test_name}}

jobs:
  PrepareStrategy:
    # batches < 1 is misconfiguration,
    # and we need this step only for batches > 1
    if: ${{ inputs.batches > 1 }}
    runs-on: [self-hosted, style-checker-aarch64]
    outputs:
      batches: ${{steps.batches.outputs.batches}}
    steps:
      - name: Calculate batches
        id: batches
        run: |
          batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))')
          echo "batches=${batches_output}" >> "$GITHUB_OUTPUT"
  Test:
    # If PrepareStrategy is skipped for batches == 1,
    # we still need to launch the test.
    # `! failure()` is mandatory here to launch on skipped Job
    # `&& !cancelled()` to allow the be cancelable
    if: ${{ ( !failure() && !cancelled() ) && inputs.batches > 0 }}
    # Do not add `-0` to the end, if there's only one batch
    name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
    env:
      GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
    runs-on: [self-hosted, '${{inputs.runner_type}}']
    needs: [PrepareStrategy]
    strategy:
      fail-fast: false  # we always wait for entire matrix
      matrix:
        # if PrepareStrategy does not have batches, we use 0
        batch: ${{ needs.PrepareStrategy.outputs.batches
                    && fromJson(needs.PrepareStrategy.outputs.batches)
                    || fromJson('[0]')}}
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          submodules: ${{inputs.submodules}}
          fetch-depth: ${{inputs.checkout_depth}}
          filter: tree:0
      - name: Set build envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          ${{inputs.additional_envs}}
          ${{secrets.secret_envs}}
          EOF
      - name: Common setup
        uses: ./.github/actions/common_setup
        with:
          job_type: test
      - name: Download json reports
        uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Setup batch
        if: ${{ inputs.batches > 1}}
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          RUN_BY_HASH_NUM=${{matrix.batch}}
          RUN_BY_HASH_TOTAL=${{inputs.batches}}
          EOF
      - name: Run test
        run: ${{inputs.run_command}}
      - name: Clean
        if: always()
        uses: ./.github/actions/clean
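A minimal caller sketch for this reusable test workflow, mirroring the migrated jobs earlier in this diff (values are illustrative; with batches: 4 the PrepareStrategy job expands the run into a matrix of batches [0, 1, 2, 3], exported to the check as RUN_BY_HASH_NUM / RUN_BY_HASH_TOTAL):

# Hypothetical caller job; the same shape as IntegrationTestsRelease above.
jobs:
  IntegrationTestsRelease:
    needs: [BuilderDebRelease]
    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: Integration tests (release)
      runner_type: stress-tester
      batches: 4
      run_command: |
        cd "$REPO_COPY/tests/ci"
        python3 integration_test_check.py "$CHECK_NAME"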
49
.github/workflows/tags_stable.yml
vendored
49
.github/workflows/tags_stable.yml
vendored
@ -3,38 +3,71 @@ name: TagsStableWorkflow
|
|||||||
# - Sends it to JFROG Artifactory
|
# - Sends it to JFROG Artifactory
|
||||||
# - Adds them to the release assets
|
# - Adds them to the release assets
|
||||||
|
|
||||||
|
env:
|
||||||
|
# Force the stdout and stderr streams to be unbuffered
|
||||||
|
PYTHONUNBUFFERED: 1
|
||||||
|
|
||||||
on: # yamllint disable-line rule:truthy
|
on: # yamllint disable-line rule:truthy
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
|
- 'v*-prestable'
|
||||||
- 'v*-stable'
|
- 'v*-stable'
|
||||||
- 'v*-lts'
|
- 'v*-lts'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
tag:
|
||||||
|
description: 'Test tag'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
UpdateVersions:
|
UpdateVersions:
|
||||||
runs-on: [self-hosted, style-checker]
|
runs-on: [self-hosted, style-checker]
|
||||||
steps:
|
steps:
|
||||||
|
- name: Set test tag
|
||||||
|
if: github.event_name == 'workflow_dispatch'
|
||||||
|
run: |
|
||||||
|
echo "GITHUB_TAG=${{ github.event.inputs.tag }}" >> "$GITHUB_ENV"
|
||||||
- name: Get tag name
|
- name: Get tag name
|
||||||
run: echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
|
if: github.event_name != 'workflow_dispatch'
|
||||||
|
run: |
|
||||||
|
echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
uses: actions/checkout@v2
|
uses: ClickHouse/checkout@v1
|
||||||
with:
|
with:
|
||||||
ref: master
|
ref: master
|
||||||
- name: Generate versions
|
fetch-depth: 0
|
||||||
|
filter: tree:0
|
||||||
|
- name: Update versions, docker version, changelog, security
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
git fetch --tags
|
|
||||||
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
||||||
|
./utils/list-versions/update-docker-version.sh
|
||||||
|
GID=$(id -g "${UID}")
|
||||||
|
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
|
||||||
|
--volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
|
||||||
|
/ClickHouse/utils/changelog/changelog.py -v --debug-helpers \
|
||||||
|
--gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
|
||||||
|
--output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"
|
||||||
|
git add "./docs/changelogs/${GITHUB_TAG}.md"
|
||||||
|
python3 ./utils/security-generator/generate_security.py > SECURITY.md
|
||||||
|
git diff HEAD
|
||||||
- name: Create Pull Request
|
- name: Create Pull Request
|
||||||
uses: peter-evans/create-pull-request@v3
|
uses: peter-evans/create-pull-request@v3
|
||||||
with:
|
with:
|
||||||
author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
||||||
|
token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
|
||||||
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
||||||
commit-message: Update version_date.tsv after ${{ env.GITHUB_TAG }}
|
commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
||||||
branch: auto/${{ env.GITHUB_TAG }}
|
branch: auto/${{ env.GITHUB_TAG }}
|
||||||
|
assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher
|
||||||
delete-branch: true
|
delete-branch: true
|
||||||
title: Update version_date.tsv after ${{ env.GITHUB_TAG }}
|
title: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
||||||
|
labels: do not test
|
||||||
body: |
|
body: |
|
||||||
Update version_date.tsv after ${{ env.GITHUB_TAG }}
|
Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
||||||
|
|
||||||
Changelog category (leave one):
|
### Changelog category (leave one):
|
||||||
- Not for changelog (changelog entry is not required)
|
- Not for changelog (changelog entry is not required)
|
||||||
|
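The "Update versions, docker version, changelog, security" step above can, in principle, be reproduced outside CI with the same image and flags. A sketch, assuming a ClickHouse checkout in the current directory, a personal token in GITHUB_TOKEN, and a placeholder tag name:

    GITHUB_TAG=v0.0.0-stable   # hypothetical tag, substitute a real one
    docker run -u "$(id -u):$(id -g)" -e PYTHONUNBUFFERED=1 \
        --volume="$PWD:/ClickHouse" clickhouse/style-test \
        /ClickHouse/utils/changelog/changelog.py -v --debug-helpers \
        --gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
        --output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"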
.github/workflows/woboq.yml (deleted, 42 lines)
@@ -1,42 +0,0 @@
-name: WoboqBuilder
-env:
-  # Force the stdout and stderr streams to be unbuffered
-  PYTHONUNBUFFERED: 1
-
-concurrency:
-  group: woboq
-on:  # yamllint disable-line rule:truthy
-  schedule:
-    - cron: '0 */18 * * *'
-  workflow_dispatch:
-jobs:
-  # don't use dockerhub push because this image updates so rarely
-  WoboqCodebrowser:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/codebrowser
-          REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
-          IMAGES_PATH=${{runner.temp}}/images_path
-          EOF
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-        with:
-          submodules: 'true'
-      - name: Codebrowser
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
.gitignore (26 changed lines)
@@ -17,6 +17,7 @@
 
 # logs
 *.log
+*.debuglog
 *.stderr
 *.stdout
 
@@ -33,10 +34,6 @@
 /docs/zh/single.md
 /docs/ja/single.md
 /docs/fa/single.md
-/docs/en/development/cmake-in-clickhouse.md
-/docs/ja/development/cmake-in-clickhouse.md
-/docs/zh/development/cmake-in-clickhouse.md
-/docs/ru/development/cmake-in-clickhouse.md
 
 # callgrind files
 callgrind.out.*
@@ -62,12 +59,17 @@ cmake_install.cmake
 CTestTestfile.cmake
 *.a
 *.o
+*.so
+*.dll
+*.lib
+*.dylib
 cmake-build-*
 
 # Python cache
 *.pyc
 __pycache__
 *.pytest_cache
+.mypy_cache
 
 test.cpp
 CPackConfig.cmake
@@ -80,6 +82,7 @@ core
 vgcore*
 
 *.deb
+*.tar.zst
 *.build
 *.upload
 *.changes
@@ -127,7 +130,6 @@ website/package-lock.json
 /.ccls-cache
 
 # clangd cache
-/.clangd
 /.cache
 
 /compile_commands.json
@@ -152,4 +154,18 @@ website/package-lock.json
 /programs/server/data
 /programs/server/metadata
 /programs/server/store
+/programs/server/uuid
+/programs/server/coordination
+
+# temporary test files
+tests/queries/0_stateless/test_*
+tests/queries/0_stateless/*.binary
+tests/queries/0_stateless/*.generated-expect
+tests/queries/0_stateless/*.expect.history
+tests/integration/**/_gen
+
+# rust
+/rust/**/target
+# It is autogenerated from *.in
+/rust/**/.cargo/config.toml
+/rust/**/vendor
.gitmodules (278 changed lines)
@@ -1,100 +1,81 @@
-[submodule "contrib/poco"]
-    path = contrib/poco
-    url = https://github.com/ClickHouse/poco.git
-    branch = clickhouse
+# Please do not use 'branch = ...' tags with submodule entries. Such tags make updating submodules a
+# little bit more convenient but they do *not* specify the tracked submodule branch. Thus, they are
+# more confusing than useful.
 [submodule "contrib/zstd"]
     path = contrib/zstd
-    url = https://github.com/facebook/zstd.git
+    url = https://github.com/facebook/zstd
 [submodule "contrib/lz4"]
     path = contrib/lz4
-    url = https://github.com/lz4/lz4.git
+    url = https://github.com/ClickHouse/lz4
 [submodule "contrib/librdkafka"]
     path = contrib/librdkafka
-    url = https://github.com/ClickHouse/librdkafka.git
+    url = https://github.com/ClickHouse/librdkafka
 [submodule "contrib/cctz"]
     path = contrib/cctz
-    url = https://github.com/ClickHouse/cctz.git
+    url = https://github.com/ClickHouse/cctz
 [submodule "contrib/zlib-ng"]
     path = contrib/zlib-ng
-    url = https://github.com/ClickHouse/zlib-ng.git
-    branch = clickhouse-2.0.x
+    url = https://github.com/ClickHouse/zlib-ng
 [submodule "contrib/googletest"]
     path = contrib/googletest
-    url = https://github.com/google/googletest.git
+    url = https://github.com/google/googletest
 [submodule "contrib/capnproto"]
     path = contrib/capnproto
-    url = https://github.com/capnproto/capnproto.git
+    url = https://github.com/ClickHouse/capnproto
 [submodule "contrib/double-conversion"]
     path = contrib/double-conversion
-    url = https://github.com/google/double-conversion.git
+    url = https://github.com/google/double-conversion
 [submodule "contrib/re2"]
     path = contrib/re2
-    url = https://github.com/google/re2.git
+    url = https://github.com/google/re2
-[submodule "contrib/llvm"]
-    path = contrib/llvm
-    url = https://github.com/ClickHouse/llvm
 [submodule "contrib/mariadb-connector-c"]
     path = contrib/mariadb-connector-c
-    url = https://github.com/ClickHouse/mariadb-connector-c.git
+    url = https://github.com/ClickHouse/mariadb-connector-c
 [submodule "contrib/jemalloc"]
     path = contrib/jemalloc
-    url = https://github.com/jemalloc/jemalloc.git
+    url = https://github.com/jemalloc/jemalloc
 [submodule "contrib/unixodbc"]
     path = contrib/unixodbc
-    url = https://github.com/ClickHouse/UnixODBC.git
+    url = https://github.com/ClickHouse/UnixODBC
-[submodule "contrib/protobuf"]
-    path = contrib/protobuf
-    url = https://github.com/ClickHouse/protobuf.git
-    branch = v3.13.0.1
+[submodule "contrib/google-protobuf"]
+    path = contrib/google-protobuf
+    url = https://github.com/ClickHouse/google-protobuf.git
 [submodule "contrib/boost"]
     path = contrib/boost
-    url = https://github.com/ClickHouse/boost.git
+    url = https://github.com/ClickHouse/boost
-[submodule "contrib/base64"]
-    path = contrib/base64
-    url = https://github.com/ClickHouse/Turbo-Base64.git
 [submodule "contrib/arrow"]
     path = contrib/arrow
-    url = https://github.com/ClickHouse/arrow.git
-    branch = blessed/release-6.0.1
+    url = https://github.com/ClickHouse/arrow
 [submodule "contrib/thrift"]
     path = contrib/thrift
-    url = https://github.com/apache/thrift.git
+    url = https://github.com/apache/thrift
 [submodule "contrib/libhdfs3"]
     path = contrib/libhdfs3
-    url = https://github.com/ClickHouse/libhdfs3.git
+    url = https://github.com/ClickHouse/libhdfs3
 [submodule "contrib/libxml2"]
     path = contrib/libxml2
-    url = https://github.com/GNOME/libxml2.git
+    url = https://github.com/GNOME/libxml2
 [submodule "contrib/libgsasl"]
     path = contrib/libgsasl
-    url = https://github.com/ClickHouse/libgsasl.git
+    url = https://github.com/ClickHouse/libgsasl
-[submodule "contrib/libcxx"]
-    path = contrib/libcxx
-    url = https://github.com/ClickHouse/libcxx.git
-[submodule "contrib/libcxxabi"]
-    path = contrib/libcxxabi
-    url = https://github.com/ClickHouse/libcxxabi.git
 [submodule "contrib/snappy"]
     path = contrib/snappy
-    url = https://github.com/ClickHouse/snappy.git
+    url = https://github.com/ClickHouse/snappy
 [submodule "contrib/cppkafka"]
     path = contrib/cppkafka
-    url = https://github.com/mfontanini/cppkafka.git
+    url = https://github.com/mfontanini/cppkafka
 [submodule "contrib/brotli"]
     path = contrib/brotli
-    url = https://github.com/google/brotli.git
+    url = https://github.com/google/brotli
 [submodule "contrib/h3"]
     path = contrib/h3
     url = https://github.com/ClickHouse/h3
-[submodule "contrib/hyperscan"]
-    path = contrib/hyperscan
-    url = https://github.com/ClickHouse/hyperscan.git
 [submodule "contrib/libunwind"]
     path = contrib/libunwind
-    url = https://github.com/ClickHouse/libunwind.git
+    url = https://github.com/ClickHouse/libunwind
 [submodule "contrib/simdjson"]
     path = contrib/simdjson
-    url = https://github.com/simdjson/simdjson.git
+    url = https://github.com/simdjson/simdjson
 [submodule "contrib/rapidjson"]
     path = contrib/rapidjson
     url = https://github.com/ClickHouse/rapidjson
@@ -106,95 +87,89 @@
     url = https://github.com/ClickHouse/orc
 [submodule "contrib/sparsehash-c11"]
     path = contrib/sparsehash-c11
-    url = https://github.com/sparsehash/sparsehash-c11.git
+    url = https://github.com/sparsehash/sparsehash-c11
 [submodule "contrib/grpc"]
     path = contrib/grpc
-    url = https://github.com/ClickHouse/grpc.git
-    branch = v1.33.2
+    url = https://github.com/ClickHouse/grpc
 [submodule "contrib/aws"]
     path = contrib/aws
-    url = https://github.com/ClickHouse/aws-sdk-cpp.git
+    url = https://github.com/ClickHouse/aws-sdk-cpp
 [submodule "aws-c-event-stream"]
     path = contrib/aws-c-event-stream
-    url = https://github.com/ClickHouse/aws-c-event-stream.git
+    url = https://github.com/awslabs/aws-c-event-stream
 [submodule "aws-c-common"]
     path = contrib/aws-c-common
-    url = https://github.com/ClickHouse/aws-c-common.git
+    url = https://github.com/ClickHouse/aws-c-common
 [submodule "aws-checksums"]
     path = contrib/aws-checksums
-    url = https://github.com/ClickHouse/aws-checksums.git
+    url = https://github.com/awslabs/aws-checksums
 [submodule "contrib/curl"]
     path = contrib/curl
-    url = https://github.com/curl/curl.git
+    url = https://github.com/curl/curl
 [submodule "contrib/icudata"]
     path = contrib/icudata
-    url = https://github.com/ClickHouse/icudata.git
+    url = https://github.com/ClickHouse/icudata
 [submodule "contrib/icu"]
     path = contrib/icu
-    url = https://github.com/unicode-org/icu.git
+    url = https://github.com/unicode-org/icu
 [submodule "contrib/flatbuffers"]
     path = contrib/flatbuffers
-    url = https://github.com/ClickHouse/flatbuffers.git
+    url = https://github.com/ClickHouse/flatbuffers
 [submodule "contrib/replxx"]
     path = contrib/replxx
-    url = https://github.com/ClickHouse/replxx.git
+    url = https://github.com/ClickHouse/replxx
 [submodule "contrib/avro"]
     path = contrib/avro
-    url = https://github.com/ClickHouse/avro.git
+    url = https://github.com/ClickHouse/avro
     ignore = untracked
 [submodule "contrib/msgpack-c"]
     path = contrib/msgpack-c
     url = https://github.com/msgpack/msgpack-c
 [submodule "contrib/libcpuid"]
     path = contrib/libcpuid
-    url = https://github.com/ClickHouse/libcpuid.git
+    url = https://github.com/ClickHouse/libcpuid
 [submodule "contrib/openldap"]
     path = contrib/openldap
-    url = https://github.com/ClickHouse/openldap.git
+    url = https://github.com/ClickHouse/openldap
 [submodule "contrib/AMQP-CPP"]
     path = contrib/AMQP-CPP
-    url = https://github.com/ClickHouse/AMQP-CPP.git
+    url = https://github.com/ClickHouse/AMQP-CPP
 [submodule "contrib/cassandra"]
     path = contrib/cassandra
-    url = https://github.com/ClickHouse/cpp-driver.git
-    branch = clickhouse
+    url = https://github.com/ClickHouse/cpp-driver
 [submodule "contrib/libuv"]
     path = contrib/libuv
-    url = https://github.com/ClickHouse/libuv.git
-    branch = clickhouse
+    url = https://github.com/ClickHouse/libuv
 [submodule "contrib/fmtlib"]
     path = contrib/fmtlib
-    url = https://github.com/fmtlib/fmt.git
+    url = https://github.com/fmtlib/fmt
 [submodule "contrib/sentry-native"]
     path = contrib/sentry-native
-    url = https://github.com/ClickHouse/sentry-native.git
+    url = https://github.com/ClickHouse/sentry-native
 [submodule "contrib/krb5"]
     path = contrib/krb5
     url = https://github.com/ClickHouse/krb5
 [submodule "contrib/cyrus-sasl"]
     path = contrib/cyrus-sasl
     url = https://github.com/ClickHouse/cyrus-sasl
-    branch = cyrus-sasl-2.1
 [submodule "contrib/croaring"]
     path = contrib/croaring
     url = https://github.com/RoaringBitmap/CRoaring
-    branch = v0.2.66
 [submodule "contrib/miniselect"]
     path = contrib/miniselect
     url = https://github.com/danlark1/miniselect
 [submodule "contrib/rocksdb"]
     path = contrib/rocksdb
-    url = https://github.com/ClickHouse/rocksdb.git
+    url = https://github.com/ClickHouse/rocksdb
 [submodule "contrib/xz"]
     path = contrib/xz
     url = https://github.com/xz-mirror/xz
 [submodule "contrib/abseil-cpp"]
     path = contrib/abseil-cpp
-    url = https://github.com/abseil/abseil-cpp.git
-    branch = lts_2021_11_02
+    url = https://github.com/abseil/abseil-cpp
 [submodule "contrib/dragonbox"]
     path = contrib/dragonbox
-    url = https://github.com/ClickHouse/dragonbox.git
+    url = https://github.com/ClickHouse/dragonbox
 [submodule "contrib/fast_float"]
     path = contrib/fast_float
     url = https://github.com/fastfloat/fast_float
@@ -203,44 +178,43 @@
     url = https://github.com/ClickHouse/libpq
 [submodule "contrib/boringssl"]
     path = contrib/boringssl
-    url = https://github.com/ClickHouse/boringssl.git
-    branch = MergeWithUpstream
+    url = https://github.com/ClickHouse/boringssl
 [submodule "contrib/NuRaft"]
     path = contrib/NuRaft
-    url = https://github.com/ClickHouse/NuRaft.git
+    url = https://github.com/ClickHouse/NuRaft
 [submodule "contrib/nanodbc"]
     path = contrib/nanodbc
-    url = https://github.com/ClickHouse/nanodbc.git
+    url = https://github.com/ClickHouse/nanodbc
 [submodule "contrib/datasketches-cpp"]
     path = contrib/datasketches-cpp
-    url = https://github.com/ClickHouse/datasketches-cpp.git
+    url = https://github.com/apache/datasketches-cpp
 [submodule "contrib/yaml-cpp"]
     path = contrib/yaml-cpp
-    url = https://github.com/ClickHouse/yaml-cpp.git
+    url = https://github.com/ClickHouse/yaml-cpp
 [submodule "contrib/cld2"]
     path = contrib/cld2
-    url = https://github.com/ClickHouse/cld2.git
+    url = https://github.com/ClickHouse/cld2
 [submodule "contrib/libstemmer_c"]
     path = contrib/libstemmer_c
-    url = https://github.com/ClickHouse/libstemmer_c.git
+    url = https://github.com/ClickHouse/libstemmer_c
 [submodule "contrib/wordnet-blast"]
     path = contrib/wordnet-blast
-    url = https://github.com/ClickHouse/wordnet-blast.git
+    url = https://github.com/ClickHouse/wordnet-blast
 [submodule "contrib/lemmagen-c"]
     path = contrib/lemmagen-c
-    url = https://github.com/ClickHouse/lemmagen-c.git
+    url = https://github.com/ClickHouse/lemmagen-c
 [submodule "contrib/libpqxx"]
     path = contrib/libpqxx
-    url = https://github.com/ClickHouse/libpqxx.git
+    url = https://github.com/ClickHouse/libpqxx
 [submodule "contrib/sqlite-amalgamation"]
     path = contrib/sqlite-amalgamation
-    url = https://github.com/azadkuh/sqlite-amalgamation
+    url = https://github.com/ClickHouse/sqlite-amalgamation
 [submodule "contrib/s2geometry"]
     path = contrib/s2geometry
-    url = https://github.com/ClickHouse/s2geometry.git
+    url = https://github.com/ClickHouse/s2geometry
 [submodule "contrib/bzip2"]
     path = contrib/bzip2
-    url = https://github.com/ClickHouse/bzip2.git
+    url = https://github.com/ClickHouse/bzip2
 [submodule "contrib/magic_enum"]
     path = contrib/magic_enum
     url = https://github.com/Neargye/magic_enum
@@ -249,16 +223,134 @@
     url = https://github.com/google/libprotobuf-mutator
 [submodule "contrib/sysroot"]
     path = contrib/sysroot
-    url = https://github.com/ClickHouse/sysroot.git
+    url = https://github.com/ClickHouse/sysroot
 [submodule "contrib/nlp-data"]
     path = contrib/nlp-data
-    url = https://github.com/ClickHouse/nlp-data.git
+    url = https://github.com/ClickHouse/nlp-data
 [submodule "contrib/hive-metastore"]
     path = contrib/hive-metastore
     url = https://github.com/ClickHouse/hive-metastore
 [submodule "contrib/azure"]
     path = contrib/azure
-    url = https://github.com/ClickHouse/azure-sdk-for-cpp.git
+    url = https://github.com/ClickHouse/azure-sdk-for-cpp
 [submodule "contrib/minizip-ng"]
     path = contrib/minizip-ng
     url = https://github.com/zlib-ng/minizip-ng
+[submodule "contrib/annoy"]
+    path = contrib/annoy
+    url = https://github.com/ClickHouse/annoy
+[submodule "contrib/qpl"]
+    path = contrib/qpl
+    url = https://github.com/intel/qpl
+[submodule "contrib/idxd-config"]
+    path = contrib/idxd-config
+    url = https://github.com/intel/idxd-config
+[submodule "contrib/wyhash"]
+    path = contrib/wyhash
+    url = https://github.com/wangyi-fudan/wyhash
+[submodule "contrib/nats-io"]
+    path = contrib/nats-io
+    url = https://github.com/ClickHouse/nats.c
+[submodule "contrib/vectorscan"]
+    path = contrib/vectorscan
+    url = https://github.com/VectorCamp/vectorscan.git
+[submodule "contrib/llvm-project"]
+    path = contrib/llvm-project
+    url = https://github.com/ClickHouse/llvm-project
+[submodule "contrib/corrosion"]
+    path = contrib/corrosion
+    url = https://github.com/corrosion-rs/corrosion
+[submodule "contrib/libssh"]
+    path = contrib/libssh
+    url = https://github.com/ClickHouse/libssh.git
+[submodule "contrib/morton-nd"]
+    path = contrib/morton-nd
+    url = https://github.com/morton-nd/morton-nd
+[submodule "contrib/xxHash"]
+    path = contrib/xxHash
+    url = https://github.com/Cyan4973/xxHash
+[submodule "contrib/crc32-s390x"]
+    path = contrib/crc32-s390x
+    url = https://github.com/linux-on-ibm-z/crc32-s390x
+[submodule "contrib/openssl"]
+    path = contrib/openssl
+    url = https://github.com/openssl/openssl
+[submodule "contrib/google-benchmark"]
+    path = contrib/google-benchmark
+    url = https://github.com/google/benchmark
+[submodule "contrib/libdivide"]
+    path = contrib/libdivide
+    url = https://github.com/ridiculousfish/libdivide
+[submodule "contrib/libbcrypt"]
+    path = contrib/libbcrypt
+    url = https://github.com/rg3/libbcrypt.git
+[submodule "contrib/ulid-c"]
+    path = contrib/ulid-c
+    url = https://github.com/ClickHouse/ulid-c.git
+[submodule "contrib/aws-crt-cpp"]
+    path = contrib/aws-crt-cpp
+    url = https://github.com/ClickHouse/aws-crt-cpp
+[submodule "contrib/aws-c-io"]
+    path = contrib/aws-c-io
+    url = https://github.com/ClickHouse/aws-c-io
+[submodule "contrib/aws-c-mqtt"]
+    path = contrib/aws-c-mqtt
+    url = https://github.com/awslabs/aws-c-mqtt
+[submodule "contrib/aws-c-auth"]
+    path = contrib/aws-c-auth
+    url = https://github.com/awslabs/aws-c-auth
+[submodule "contrib/aws-c-cal"]
+    path = contrib/aws-c-cal
+    url = https://github.com/ClickHouse/aws-c-cal
+[submodule "contrib/aws-c-sdkutils"]
+    path = contrib/aws-c-sdkutils
+    url = https://github.com/awslabs/aws-c-sdkutils
+[submodule "contrib/aws-c-http"]
+    path = contrib/aws-c-http
+    url = https://github.com/awslabs/aws-c-http
+[submodule "contrib/aws-c-s3"]
+    path = contrib/aws-c-s3
+    url = https://github.com/awslabs/aws-c-s3
+[submodule "contrib/aws-c-compression"]
+    path = contrib/aws-c-compression
+    url = https://github.com/awslabs/aws-c-compression
+[submodule "contrib/aws-s2n-tls"]
+    path = contrib/aws-s2n-tls
+    url = https://github.com/ClickHouse/s2n-tls
+[submodule "contrib/crc32-vpmsum"]
+    path = contrib/crc32-vpmsum
+    url = https://github.com/antonblanchard/crc32-vpmsum.git
+[submodule "contrib/liburing"]
+    path = contrib/liburing
+    url = https://github.com/axboe/liburing
+[submodule "contrib/libarchive"]
+    path = contrib/libarchive
+    url = https://github.com/libarchive/libarchive.git
+    ignore = dirty
+[submodule "contrib/libfiu"]
+    path = contrib/libfiu
+    url = https://github.com/ClickHouse/libfiu.git
+[submodule "contrib/isa-l"]
+    path = contrib/isa-l
+    url = https://github.com/ClickHouse/isa-l.git
+[submodule "contrib/c-ares"]
+    path = contrib/c-ares
+    url = https://github.com/c-ares/c-ares.git
+[submodule "contrib/incbin"]
+    path = contrib/incbin
+    url = https://github.com/graphitemaster/incbin.git
+[submodule "contrib/usearch"]
+    path = contrib/usearch
+    url = https://github.com/unum-cloud/usearch.git
+[submodule "contrib/SimSIMD"]
+    path = contrib/SimSIMD
+    url = https://github.com/ashvardanian/SimSIMD.git
+[submodule "contrib/FP16"]
+    path = contrib/FP16
+    url = https://github.com/Maratyszcza/FP16.git
+[submodule "contrib/robin-map"]
+    path = contrib/robin-map
+    url = https://github.com/Tessil/robin-map.git
+[submodule "contrib/aklomp-base64"]
+    path = contrib/aklomp-base64
+    url = https://github.com/aklomp/base64.git
|
|||||||
ignore-long-lines = (# )?<?https?://\S+>?$
|
ignore-long-lines = (# )?<?https?://\S+>?$
|
||||||
|
|
||||||
[MESSAGES CONTROL]
|
[MESSAGES CONTROL]
|
||||||
disable = bad-continuation,
|
disable = missing-docstring,
|
||||||
missing-docstring,
|
|
||||||
bad-whitespace,
|
|
||||||
too-few-public-methods,
|
too-few-public-methods,
|
||||||
invalid-name,
|
invalid-name,
|
||||||
too-many-arguments,
|
too-many-arguments,
|
||||||
|
.snyk (new file, 4 lines)
@@ -0,0 +1,4 @@
+# Snyk (https://snyk.io) policy file
+exclude:
+  global:
+    - tests/**
.vimrc (deleted, 2 lines)
@@ -1,2 +0,0 @@
-au BufRead,BufNewFile ./* set tabstop=4 softtabstop=0 expandtab shiftwidth=4 smarttab tags=tags,../tags
-
@@ -6,8 +6,10 @@ rules:
     level: warning
     indent-sequences: consistent
   line-length:
-    # there are some bash -c "", so this is OK
-    max: 300
+    # there are:
+    # - bash -c "", so this is OK
+    # - yaml in tests
+    max: 1000
     level: warning
   comments:
     min-spaces-from-content: 1
CHANGELOG.md (2417 changed lines; diff suppressed because it is too large)
CMakeLists.txt (469 changed lines)
@@ -1,32 +1,6 @@
-cmake_minimum_required(VERSION 3.14)
+cmake_minimum_required(VERSION 3.20)
 
-foreach(policy
-    CMP0023
-    CMP0048 # CMake 3.0
-    CMP0074 # CMake 3.12
-    CMP0077
-    CMP0079
-  )
-  if(POLICY ${policy})
-    cmake_policy(SET ${policy} NEW)
-  endif()
-endforeach()
-
-# set default policy
-foreach(default_policy_var_name
-    # make option() honor normal variables for BUILD_SHARED_LIBS:
-    # - re2
-    # - snappy
-    CMAKE_POLICY_DEFAULT_CMP0077
-    # Google Test from sources uses too old cmake, 2.6.x, and CMP0022 should
-    # set, to avoid using deprecated LINK_INTERFACE_LIBRARIES(_<CONFIG>)? over
-    # INTERFACE_LINK_LIBRARIES.
-    CMAKE_POLICY_DEFAULT_CMP0022
-  )
-  set(${default_policy_var_name} NEW)
-endforeach()
-
-project(ClickHouse)
+project(ClickHouse LANGUAGES C CXX ASM)
 
 # If turned off: e.g. when ENABLE_FOO is ON, but FOO tool was not found, the CMake will continue.
 option(FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION
@@ -39,20 +13,19 @@ else()
     set(RECONFIGURE_MESSAGE_LEVEL WARNING)
 endif()
 
-enable_language(C CXX ASM)
-
 include (cmake/arch.cmake)
 include (cmake/target.cmake)
 include (cmake/tools.cmake)
-include (cmake/analysis.cmake)
-include (cmake/git_status.cmake)
+include (cmake/ccache.cmake)
+include (cmake/clang_tidy.cmake)
+include (cmake/git.cmake)
+include (cmake/utils.cmake)
 
 # Ignore export() since we don't use it,
 # but it gets broken with a global targets via link_libraries()
 macro (export)
 endmacro ()
 
-set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/")
 set(CMAKE_EXPORT_COMPILE_COMMANDS 1) # Write compile_commands.json
 set(CMAKE_LINK_DEPENDS_NO_SHARED 1) # Do not relink all depended targets on .so
 set(CMAKE_CONFIGURATION_TYPES "RelWithDebInfo;Debug;Release;MinSizeRel" CACHE STRING "" FORCE)
@@ -64,11 +37,9 @@ set_property(GLOBAL PROPERTY USE_FOLDERS ON)
 
 # Check that submodules are present
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sysroot/README.md")
-    message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init --recursive")
+    message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init")
 endif ()
 
-include (cmake/ccache.cmake)
-
 # Take care to add prlimit in command line before ccache, or else ccache thinks that
 # prlimit is compiler, and clang++ is its input file, and refuses to work with
 # multiple inputs, e.g in ccache log:
@@ -87,8 +58,8 @@ if (ENABLE_CHECK_HEAVY_BUILDS)
     # set CPU time limit to 1000 seconds
     set (RLIMIT_CPU 1000)
 
-    # gcc10/gcc10/clang -fsanitize=memory is too heavy
-    if (SANITIZE STREQUAL "memory" OR COMPILER_GCC)
+    # -fsanitize=memory is too heavy
+    if (SANITIZE STREQUAL "memory")
         set (RLIMIT_DATA 10000000000) # 10G
     endif()
 
@@ -103,28 +74,7 @@ message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
 
 string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)
 
-option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON)
-
-if (NOT USE_STATIC_LIBRARIES)
-    # DEVELOPER ONLY.
-    # Faster linking if turned on.
-    option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files")
-
-    option(CLICKHOUSE_SPLIT_BINARY
-        "Make several binaries (clickhouse-server, clickhouse-client etc.) instead of one bundled")
-endif ()
-
-if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
-    message(FATAL_ERROR "Defining SPLIT_SHARED_LIBRARIES=1 without USE_STATIC_LIBRARIES=0 has no effect.")
-endif()
-
-if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
-    set(BUILD_SHARED_LIBS 1 CACHE INTERNAL "")
-endif ()
-
-if (USE_STATIC_LIBRARIES)
-    list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
-endif ()
+list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
 
 option (ENABLE_FUZZING "Fuzzy testing using libfuzzer" OFF)
 
@@ -138,8 +88,6 @@ if (ENABLE_FUZZING)
     set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
     set (ENABLE_LIBRARIES 0)
     set (ENABLE_SSL 1)
-    set (USE_UNWIND ON)
-    set (ENABLE_EMBEDDED_COMPILER 0)
     set (ENABLE_EXAMPLES 0)
     set (ENABLE_UTILS 0)
     set (ENABLE_THINLTO 0)
@@ -147,6 +95,7 @@ if (ENABLE_FUZZING)
     set (ENABLE_JEMALLOC 0)
     set (ENABLE_CHECK_HEAVY_BUILDS 1)
     set (GLIBC_COMPATIBILITY OFF)
+    set (ENABLE_BENCHMARKS 0)
 
     # For codegen_select_fuzzer
     set (ENABLE_PROTOBUF 1)
@@ -158,57 +107,65 @@ endif()
 # - sanitize.cmake
 add_library(global-libs INTERFACE)
 
-include (cmake/fuzzer.cmake)
+# We don't want to instrument everything with fuzzer, but only specific targets (see below),
+# also, since we build our own llvm, we specifically don't want to instrument
+# libFuzzer library itself - it would result in infinite recursion
+#include (cmake/fuzzer.cmake)
 
 include (cmake/sanitize.cmake)
 
-if (CMAKE_GENERATOR STREQUAL "Ninja" AND NOT DISABLE_COLORED_BUILD)
+option(ENABLE_COLORED_BUILD "Enable colors in compiler output" ON)
+
+set (CMAKE_COLOR_MAKEFILE ${ENABLE_COLORED_BUILD}) # works only for the makefile generator
+
+if (ENABLE_COLORED_BUILD AND CMAKE_GENERATOR STREQUAL "Ninja")
     # Turn on colored output. https://github.com/ninja-build/ninja/wiki/FAQ
     set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
     set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-color=always")
+    # ... such manually setting of flags can be removed once CMake supports a variable to
+    # activate colors in *all* build systems: https://gitlab.kitware.com/cmake/cmake/-/issues/15502
+    # --> available since CMake 3.24: https://stackoverflow.com/a/73349744
 endif ()
 
 include (cmake/check_flags.cmake)
 include (cmake/add_warning.cmake)
 
-set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
-
 if (COMPILER_CLANG)
-    # clang: warning: argument unused during compilation: '-specs=/usr/share/dpkg/no-pie-compile.specs' [-Wunused-command-line-argument]
-    set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wno-unused-command-line-argument")
     # generate ranges for fast "addr2line" search
     if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
+        # NOTE: that clang has a bug because of it does not emit .debug_aranges
+        # with ThinLTO, so custom ld.lld wrapper is shipped in docker images.
         set(COMPILER_FLAGS "${COMPILER_FLAGS} -gdwarf-aranges")
     endif ()
 
-    if (HAS_USE_CTOR_HOMING)
-        # For more info see https://blog.llvm.org/posts/2021-04-05-constructor-homing-for-debug-info/
-        if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG" OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO")
-            set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Xclang -fuse-ctor-homing")
-            set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Xclang -fuse-ctor-homing")
-        endif()
+    # See https://blog.llvm.org/posts/2021-04-05-constructor-homing-for-debug-info/
+    if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG" OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO")
+        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Xclang -fuse-ctor-homing")
+        set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Xclang -fuse-ctor-homing")
     endif()
+
+    no_warning(enum-constexpr-conversion) # breaks Protobuf in clang-16
 endif ()
 
-# If compiler has support for -Wreserved-identifier. It is difficult to detect by clang version,
-# because there are two different branches of clang: clang and AppleClang.
-# (AppleClang is not supported by ClickHouse, but some developers have misfortune to use it).
-if (HAS_RESERVED_IDENTIFIER)
-    add_compile_definitions (HAS_RESERVED_IDENTIFIER)
-endif ()
-
-# If turned `ON`, assumes the user has either the system GTest library or the bundled one.
 option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
 option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
+option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
 
-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT USE_MUSL)
     # Only for Linux, x86_64 or aarch64.
     option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
 elseif(GLIBC_COMPATIBILITY)
     message (${RECONFIGURE_MESSAGE_LEVEL} "Glibc compatibility cannot be enabled in current configuration")
 endif ()
 
-# Make sure the final executable has symbols exported
-set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
+if (OS_LINUX)
+    # We should not export dynamic symbols, because:
+    # - The main clickhouse binary does not use dlopen,
+    #   and whatever is poisoning it by LD_PRELOAD should not link to our symbols.
+    # - The clickhouse-odbc-bridge and clickhouse-library-bridge binaries
+    #   should not expose their symbols to ODBC drivers and libraries.
+    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-export-dynamic -Wl,--gc-sections")
+endif ()
 
 if (OS_DARWIN)
     # The `-all_load` flag forces loading of all symbols from all libraries,
@@ -222,40 +179,31 @@ else ()
     set(NO_WHOLE_ARCHIVE --no-whole-archive)
 endif ()
 
-option(ENABLE_CURL_BUILD "Enable curl, azure, sentry build on by default except MacOS." ON)
-if (OS_DARWIN)
-    # Disable the curl, azure, senry build on MacOS
-    set (ENABLE_CURL_BUILD OFF)
-endif ()
-
-# Ignored if `lld` is used
-option(ADD_GDB_INDEX_FOR_GOLD "Add .gdb-index to resulting binaries for gold linker.")
-
 if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
-    # Can be lld or ld-lld.
-    if (LINKER_NAME MATCHES "lld$")
+    # Can be lld or ld-lld or lld-13 or /path/to/lld.
+    if (LINKER_NAME MATCHES "lld")
         set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gdb-index")
-        set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--gdb-index")
         message (STATUS "Adding .gdb-index via --gdb-index linker option.")
-    # we use another tool for gdb-index, because gold linker removes section .debug_aranges, which used inside clickhouse stacktraces
-    # http://sourceware-org.1504.n7.nabble.com/gold-No-debug-aranges-section-when-linking-with-gdb-index-td540965.html#a556932
-    elseif (LINKER_NAME MATCHES "gold$" AND ADD_GDB_INDEX_FOR_GOLD)
-        find_program (GDB_ADD_INDEX_EXE NAMES "gdb-add-index" DOC "Path to gdb-add-index executable")
-        if (NOT GDB_ADD_INDEX_EXE)
-            set (USE_GDB_ADD_INDEX 0)
-            message (WARNING "Cannot add gdb index to binaries, because gold linker is used, but gdb-add-index executable not found.")
-        else()
-            set (USE_GDB_ADD_INDEX 1)
-            message (STATUS "gdb-add-index found: ${GDB_ADD_INDEX_EXE}")
-        endif()
     endif ()
 endif()
 
-if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
-    set(USE_DEBUG_HELPERS ON)
+if (NOT (SANITIZE_COVERAGE OR WITH_COVERAGE)
+    AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE"
+        OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO"
+        OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL"))
+    set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT ON)
+else()
+    set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT OFF)
 endif()
+# Provides faster linking and lower binary size.
+# Tradeoff is the inability to debug some source files with e.g. gdb
+# (empty stack frames and no local variables)."
+option(OMIT_HEAVY_DEBUG_SYMBOLS
+    "Do not generate debugger info for heavy modules (ClickHouse functions and dictionaries, some contrib)"
+    ${OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT})
 
 option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})
 
 option(BUILD_STANDALONE_KEEPER "Build keeper as small standalone binary" OFF)
 if (NOT BUILD_STANDALONE_KEEPER)
     option(CREATE_KEEPER_SYMLINK "Create symlink for clickhouse-keeper to main server binary" ON)
@@ -264,7 +212,8 @@ else ()
 endif ()
 
 # Create BuildID when using lld. For other linkers it is created by default.
-if (LINKER_NAME MATCHES "lld$")
+# (NOTE: LINKER_NAME can be either path or name, and in different variants)
+if (LINKER_NAME MATCHES "lld")
     # SHA1 is not cryptographically secure but it is the best what lld is offering.
     set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--build-id=sha1")
 endif ()
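A quick way to verify that the --build-id=sha1 flag took effect on a finished build is to read the ELF notes of the binary; a sketch, with the path depending on your build directory layout:

    readelf --notes build/programs/clickhouse | grep -i 'build id'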
@ -272,16 +221,18 @@ endif ()
|
|||||||
# Add a section with the hash of the compiled machine code for integrity checks.
|
# Add a section with the hash of the compiled machine code for integrity checks.
|
||||||
# Only for official builds, because adding a section can be time consuming (rewrite of several GB).
|
# Only for official builds, because adding a section can be time consuming (rewrite of several GB).
|
||||||
# And cross compiled binaries are not supported (since you cannot execute clickhouse hash-binary)
|
# And cross compiled binaries are not supported (since you cannot execute clickhouse hash-binary)
|
||||||
if (OBJCOPY_PATH AND CLICKHOUSE_OFFICIAL_BUILD AND (NOT CMAKE_TOOLCHAIN_FILE OR CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64.cmake$"))
|
if (CLICKHOUSE_OFFICIAL_BUILD AND (NOT CMAKE_TOOLCHAIN_FILE OR CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64.cmake$"))
|
||||||
|
message(STATUS "Official build: A checksum hash will be added to the clickhouse executable")
|
||||||
set (USE_BINARY_HASH 1 CACHE STRING "Calculate binary hash and store it in the separate section")
|
set (USE_BINARY_HASH 1 CACHE STRING "Calculate binary hash and store it in the separate section")
|
||||||
|
else ()
|
||||||
|
message(STATUS "No official build: A checksum hash will not be added to the clickhouse executable")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Allows to build stripped binary in a separate directory
|
# Optionally split binaries and debug symbols.
|
||||||
if (OBJCOPY_PATH AND STRIP_PATH)
|
option(SPLIT_DEBUG_SYMBOLS "Split binaries and debug symbols" OFF)
|
||||||
option(INSTALL_STRIPPED_BINARIES "Build stripped binaries with debug info in separate directory" OFF)
|
if (SPLIT_DEBUG_SYMBOLS)
|
||||||
if (INSTALL_STRIPPED_BINARIES)
|
message(STATUS "Will split binaries and debug symbols")
|
||||||
set(STRIPPED_BINARIES_OUTPUT "stripped" CACHE STRING "A separate directory for stripped information")
|
set(SPLITTED_DEBUG_SYMBOLS_DIR "stripped" CACHE STRING "A separate directory for stripped information")
|
||||||
endif()
|
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
cmake_host_system_information(RESULT AVAILABLE_PHYSICAL_MEMORY QUERY AVAILABLE_PHYSICAL_MEMORY) # Not available under freebsd
|
cmake_host_system_information(RESULT AVAILABLE_PHYSICAL_MEMORY QUERY AVAILABLE_PHYSICAL_MEMORY) # Not available under freebsd
|
||||||
@ -315,84 +266,74 @@ endif ()
|
|||||||
option (ENABLE_BUILD_PATH_MAPPING "Enable remapping of file source paths in debug info, predefined preprocessor macros, and __builtin_FILE(). It's used to generate reproducible builds. See https://reproducible-builds.org/docs/build-path" ${ENABLE_BUILD_PATH_MAPPING_DEFAULT})
|
option (ENABLE_BUILD_PATH_MAPPING "Enable remapping of file source paths in debug info, predefined preprocessor macros, and __builtin_FILE(). It's used to generate reproducible builds. See https://reproducible-builds.org/docs/build-path" ${ENABLE_BUILD_PATH_MAPPING_DEFAULT})
|
||||||
|
|
||||||
if (ENABLE_BUILD_PATH_MAPPING)
|
if (ENABLE_BUILD_PATH_MAPPING)
|
||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffile-prefix-map=${PROJECT_SOURCE_DIR}=.")
|
||||||
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
|
 set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} -ffile-prefix-map=${PROJECT_SOURCE_DIR}=.")
 endif ()

 option (ENABLE_BUILD_PROFILING "Enable profiling of build time" OFF)
 if (ENABLE_BUILD_PROFILING)
 if (COMPILER_CLANG)
 set (COMPILER_FLAGS "${COMPILER_FLAGS} -ftime-trace")

+if (LINKER_NAME MATCHES "lld")
+set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--time-trace")
+set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--time-trace")
+endif ()
 else ()
 message (${RECONFIGURE_MESSAGE_LEVEL} "Build profiling is only available with CLang")
 endif ()
 endif ()

-if (${CMAKE_VERSION} VERSION_LESS "3.12.4")
-# CMake < 3.12 doesn't support setting 20 as a C++ standard version.
-# We will add C++ standard controlling flag in CMAKE_CXX_FLAGS manually for now.
-
-if (COMPILER_GCC OR COMPILER_CLANG)
-# to make numeric_limits<__int128> works with GCC
-set (_CXX_STANDARD "gnu++2a")
-else ()
-set (_CXX_STANDARD "c++2a")
-endif ()
-
-set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=${_CXX_STANDARD}")
-else ()
-set (CMAKE_CXX_STANDARD 20)
-set (CMAKE_CXX_EXTENSIONS ON) # Same as gnu++2a (ON) vs c++2a (OFF): https://cmake.org/cmake/help/latest/prop_tgt/CXX_EXTENSIONS.html
-set (CMAKE_CXX_STANDARD_REQUIRED ON)
-endif ()
+set (CMAKE_CXX_STANDARD 23)
+set (CMAKE_CXX_EXTENSIONS OFF)
+set (CMAKE_CXX_STANDARD_REQUIRED ON)

 set (CMAKE_C_STANDARD 11)
-set (CMAKE_C_EXTENSIONS ON)
+set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C
 set (CMAKE_C_STANDARD_REQUIRED ON)

-if (COMPILER_GCC OR COMPILER_CLANG)
+if (COMPILER_CLANG)
 # Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
+# See https://reviews.llvm.org/D112921
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsized-deallocation")
-endif ()

 # falign-functions=32 prevents from random performance regressions with the code change. Thus, providing more stable
 # benchmarks.
-if (COMPILER_GCC OR COMPILER_CLANG)
 set(COMPILER_FLAGS "${COMPILER_FLAGS} -falign-functions=32")

+if (ARCH_AMD64)
+# align branches within a 32-Byte boundary to avoid the potential performance loss when code layout change,
+# which makes benchmark results more stable.
+set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
+set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
+endif()
 endif ()

-if (COMPILER_GCC)
-set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fcoroutines")
-endif ()
-
-# Compiler-specific coverage flags e.g. -fcoverage-mapping for gcc
-option(WITH_COVERAGE "Profile the resulting binary/binaries" OFF)
-
-if (WITH_COVERAGE AND COMPILER_CLANG)
-set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
-# If we want to disable coverage for specific translation units
-set(WITHOUT_COVERAGE "-fno-profile-instr-generate -fno-coverage-mapping")
-endif()
-
-if (WITH_COVERAGE AND COMPILER_GCC)
-set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-arcs -ftest-coverage")
-set(COVERAGE_OPTION "-lgcov")
-set(WITHOUT_COVERAGE "-fno-profile-arcs -fno-test-coverage")
-endif()

 set (COMPILER_FLAGS "${COMPILER_FLAGS}")

 # Our built-in unwinder only supports DWARF version up to 4.
-set (DEBUG_INFO_FLAGS "-g -gdwarf-4")
+set (DEBUG_INFO_FLAGS "-g")

-set (CMAKE_BUILD_COLOR_MAKEFILE ON)
-set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${PLATFORM_EXTRA_CXX_FLAG} ${COMMON_WARNING_FLAGS} ${CXX_WARNING_FLAGS}")
+# Disable omit frame pointer compiler optimization using -fno-omit-frame-pointer
+option(DISABLE_OMIT_FRAME_POINTER "Disable omit frame pointer compiler optimization" OFF)

+if (DISABLE_OMIT_FRAME_POINTER)
+set (CMAKE_CXX_FLAGS_ADD "${CMAKE_CXX_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
+set (CMAKE_C_FLAGS_ADD "${CMAKE_C_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
+set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
+endif()

+set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
 set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
-set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_CXX_FLAGS_ADD}")
+set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")

-set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${COMMON_WARNING_FLAGS} ${CMAKE_C_FLAGS_ADD}")
+set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
 set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
-set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_C_FLAGS_ADD}")
+set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")

+set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
+set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
+set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")

 if (COMPILER_CLANG)
 if (OS_DARWIN)
@@ -404,20 +345,15 @@ if (COMPILER_CLANG)
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
 set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")

-if (NOT ENABLE_TESTS AND NOT SANITIZE)
+if (NOT ENABLE_TESTS AND NOT SANITIZE AND OS_LINUX)
 # https://clang.llvm.org/docs/ThinLTO.html
-# Applies to clang only.
+# Applies to clang and linux only.
 # Disabled when building with tests or sanitizers.
 option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
 endif()

 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")

-# Set new experimental pass manager, it's a performance, build time and binary size win.
-# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fexperimental-new-pass-manager")

 # We cannot afford to use LTO when compiling unit tests, and it's not enough
 # to only supply -fno-lto at the final linking stage. So we disable it
 # completely.
@@ -431,19 +367,12 @@ if (COMPILER_CLANG)
 endif ()

 elseif (ENABLE_THINLTO)
-message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with CLang")
+message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with Clang")
 endif ()

 # Turns on all external libs like s3, kafka, ODBC, ...
 option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)

-if (NOT (OS_LINUX OR OS_DARWIN))
-# Using system libs can cause a lot of warnings in includes (on macro expansion).
-option(WERROR "Enable -Werror compiler option" OFF)
-else ()
-option(WERROR "Enable -Werror compiler option" ON)
-endif ()

 # Increase stack size on Musl. We need big stack for our recursive-descend parser.
 if (USE_MUSL)
 set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,stack-size=2097152")
@@ -458,18 +387,33 @@ elseif (OS_DARWIN)
 include(cmake/darwin/default_libs.cmake)
 elseif (OS_FREEBSD)
 include(cmake/freebsd/default_libs.cmake)
+else()
+link_libraries(global-group)
 endif ()
-link_libraries(global-group)

+option (ENABLE_GWP_ASAN "Enable Gwp-Asan" ON)
+# We use mmap for allocations more heavily in debug builds,
+# but GWP-ASan also wants to use mmap frequently,
+# and due to a large number of memory mappings,
+# it does not work together well.
+if ((NOT OS_LINUX AND NOT OS_ANDROID) OR (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
+set(ENABLE_GWP_ASAN OFF)
+endif ()

+option (ENABLE_FIU "Enable Fiu" ON)

+option(WERROR "Enable -Werror compiler option" ON)

 if (WERROR)
 # Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
 # Instead, adopt modern cmake usage requirement.
+# TODO: Set CMAKE_COMPILE_WARNING_AS_ERROR (cmake 3.24)
 target_compile_options(global-group INTERFACE "-Werror")
 endif ()

 # Make this extra-checks for correct library dependencies.
 if (OS_LINUX AND NOT SANITIZE)
-target_link_options(global-group INTERFACE "-Wl,--no-undefined")
+target_link_options(global-group INTERFACE "LINKER:--no-undefined")
 endif ()

 ######################################
@@ -478,22 +422,16 @@ endif ()

 set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")

-if (USE_STATIC_LIBRARIES)
+if (NOT SANITIZE)
 set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
-if (OS_LINUX AND NOT ARCH_ARM)
-# Slightly more efficient code can be generated
-# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
-set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
-set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
-set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
-endif ()
-else ()
-set (CMAKE_POSITION_INDEPENDENT_CODE ON)
-# This is required for clang on Arch linux, that uses PIE by default.
-# See enable-SSP-and-PIE-by-default.patch [1].
-#
-# [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
-set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
+endif()
+if (OS_LINUX AND NOT (ARCH_AARCH64 OR ARCH_S390X) AND NOT SANITIZE)
+# Slightly more efficient code can be generated
+# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
+set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
+set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
+set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
 endif ()

 if (ENABLE_TESTS)
@@ -504,6 +442,15 @@ endif ()

 enable_testing() # Enable for tests without binary

+option(ENABLE_OPENSSL "This option performs a build with OpenSSL. NOTE! This option is insecure and should never be used. By default, ClickHouse uses and only supports BoringSSL" OFF)

+if (ARCH_S390X)
+set(ENABLE_OPENSSL_DYNAMIC_DEFAULT ON)
+else ()
+set(ENABLE_OPENSSL_DYNAMIC_DEFAULT OFF)
+endif ()
+option(ENABLE_OPENSSL_DYNAMIC "This option removes SSL from ClickHouse and will link to the OpenSSL version supplied by OS." ${ENABLE_OPENSSL_DYNAMIC_DEFAULT})

 # when installing to /usr - place configs to /etc but for /usr/local place to /usr/local/etc
 if (CMAKE_INSTALL_PREFIX STREQUAL "/usr")
 set (CLICKHOUSE_ETC_DIR "/etc")
@@ -511,11 +458,7 @@ else ()
 set (CLICKHOUSE_ETC_DIR "${CMAKE_INSTALL_PREFIX}/etc")
 endif ()

-message (STATUS
-"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
-USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
-SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
-CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
+message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE}")

 include (GNUInstallDirs)
@@ -531,14 +474,14 @@ if (NOT ENABLE_JEMALLOC)
 message (WARNING "Non default allocator is disabled. This is not recommended for production builds.")
 endif ()

-macro (add_executable target)
+macro (clickhouse_add_executable target)
 # invoke built-in add_executable
 # explicitly acquire and interpose malloc symbols by clickhouse_malloc
 # if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on than provide memcpy symbol explicitly to neutrialize thinlto's libcall generation.
 if (ARCH_AMD64 AND GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
-_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:memcpy>)
+add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:memcpy>)
 else ()
-_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
 endif ()

 get_target_property (type ${target} TYPE)
@@ -561,22 +504,124 @@ macro (add_executable target)
 # - _je_zone_register due to JEMALLOC_PRIVATE_NAMESPACE=je_ under OS X.
 # - but jemalloc-cmake does not run private_namespace.sh
 # so symbol name should be _zone_register
-if (ENABLE_JEMALLOC AND USE_STATIC_LIBRARIES AND OS_DARWIN)
+if (ENABLE_JEMALLOC AND OS_DARWIN)
 set_property(TARGET ${target} APPEND PROPERTY LINK_OPTIONS -u_zone_register)
 endif()
 endif()
 endmacro()

-set(ConfigIncludePath ${CMAKE_CURRENT_BINARY_DIR}/includes/configs CACHE INTERNAL "Path to generated configuration files.")
-include_directories(${ConfigIncludePath})
+# With cross-compiling, all targets are built for the target platform which usually different from the host
+# platform. This is problematic if a build artifact X (e.g. a file or an executable) is generated by running
+# another executable Y previously produced in the build. This is solved by compiling and running Y for/on
+# the host platform. Add target to the list:
+# add_native_target(<target> ...)
+set_property (GLOBAL PROPERTY NATIVE_BUILD_TARGETS)
+function (add_native_target)
+set_property (GLOBAL APPEND PROPERTY NATIVE_BUILD_TARGETS ${ARGV})
+endfunction (add_native_target)

+set(CONFIG_INCLUDE_PATH ${CMAKE_CURRENT_BINARY_DIR}/includes/configs CACHE INTERNAL "Path to generated configuration files.")
+include_directories(${CONFIG_INCLUDE_PATH})

 # Add as many warnings as possible for our own code.
 include (cmake/warnings.cmake)
 include (cmake/print_flags.cmake)

+if (ENABLE_RUST)
+add_subdirectory (rust)
+
+# With LTO Rust adds few symbols with global visiblity, the most common is
+# rust_eh_personality. And this leads to linking errors because multiple
+# Rust libraries contains the same symbol.
+#
+# If it was shared library, that we could use version script for linker to
+# hide this symbols, but libraries are static.
+#
+# we could in theory compile everything to one library but this will be a
+# mess
+#
+# But this should be OK since CI has lots of other builds that are done
+# without LTO and it will find multiple definitions if there will be any.
+#
+# More information about this behaviour in Rust can be found here
+# - https://github.com/rust-lang/rust/issues/44322
+# - https://alanwu.space/post/symbol-hygiene/
+if (ENABLE_THINLTO)
+set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--allow-multiple-definition")
+endif()
+endif()

+if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND NOT SANITIZE AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64))
+set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON)
+else ()
+set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF)
+endif ()
+option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." ${CHECK_LARGE_OBJECT_SIZES_DEFAULT})

 add_subdirectory (base)
 add_subdirectory (src)
 add_subdirectory (programs)
 add_subdirectory (utils)

-include (cmake/sanitize_target_link_libraries.cmake)
+if (FUZZER)
+# Bundle fuzzers target
+add_custom_target(fuzzers)
+# Instrument all targets fuzzer and link with libfuzzer
+get_all_targets(all_targets)
+foreach(target ${all_targets})
+if (NOT(target STREQUAL "_fuzzer" OR target STREQUAL "_fuzzer_no_main"))
+get_target_property(target_type ${target} TYPE)
+if (NOT(target_type STREQUAL "INTERFACE_LIBRARY" OR target_type STREQUAL "UTILITY"))
+target_compile_options(${target} PRIVATE "-fsanitize=fuzzer-no-link")
+endif()
+# clickhouse fuzzer isn't working correctly
+# initial PR https://github.com/ClickHouse/ClickHouse/pull/27526
+#if (target MATCHES ".+_fuzzer" OR target STREQUAL "clickhouse")
+if (target_type STREQUAL "EXECUTABLE" AND target MATCHES ".+_fuzzer")
+message(STATUS "${target} instrumented with fuzzer")
+target_link_libraries(${target} PUBLIC ch_contrib::fuzzer)
+# Add to fuzzers bundle
+add_dependencies(fuzzers ${target})
+get_target_filename(${target} target_bin_name)
+get_target_property(target_bin_dir ${target} BINARY_DIR)
+add_custom_command(TARGET fuzzers POST_BUILD COMMAND mv "${target_bin_dir}/${target_bin_name}" "${CMAKE_CURRENT_BINARY_DIR}/programs/" VERBATIM)
+endif()
+endif()
+endforeach()
+add_custom_command(TARGET fuzzers POST_BUILD COMMAND SRC=${CMAKE_SOURCE_DIR} BIN=${CMAKE_BINARY_DIR} OUT=${CMAKE_BINARY_DIR}/programs ${CMAKE_SOURCE_DIR}/tests/fuzz/build.sh VERBATIM)
+endif()

+include (cmake/sanitize_targets.cmake)

+# Build native targets if necessary
+get_property(NATIVE_BUILD_TARGETS GLOBAL PROPERTY NATIVE_BUILD_TARGETS)
+if (NATIVE_BUILD_TARGETS
+AND NOT(
+CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
+AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR
+)
+)
+message (STATUS "Building native targets...")

+set (NATIVE_BUILD_DIR "${PROJECT_BINARY_DIR}/native")

+execute_process(
+COMMAND ${CMAKE_COMMAND} -E make_directory "${NATIVE_BUILD_DIR}"
+COMMAND_ECHO STDOUT)

+execute_process(
+COMMAND ${CMAKE_COMMAND}
+"-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
+"-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
+"-DCOMPILER_CACHE=${COMPILER_CACHE}"
+# Avoid overriding .cargo/config.toml with native toolchain.
+"-DENABLE_RUST=OFF"
+"-DENABLE_CLICKHOUSE_SELF_EXTRACTING=${ENABLE_CLICKHOUSE_SELF_EXTRACTING}"
+${PROJECT_SOURCE_DIR}
+WORKING_DIRECTORY "${NATIVE_BUILD_DIR}"
+COMMAND_ECHO STDOUT)

+execute_process(
+COMMAND ${CMAKE_COMMAND} --build "${NATIVE_BUILD_DIR}" --target ${NATIVE_BUILD_TARGETS}
+COMMAND_ECHO STDOUT)
+endif ()
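The `-fsized-deallocation` flag kept for Clang in the hunk above enables the C++14 sized form of the global `operator delete`. The snippet below is a minimal standalone sketch (not ClickHouse code; all names are made up) of what that means for a program that replaces the global allocator: with the flag, a plain `delete` expression may call the overload that also receives the object size, so the allocator does not have to look the size up itself.

```cpp
// Standalone sketch of sized global deallocation (C++14), assuming a build with
// something like: clang++ -std=c++20 -fsized-deallocation sized_delete_demo.cpp
#include <cstdio>
#include <cstdlib>
#include <new>

void * operator new(std::size_t size)
{
    void * p = std::malloc(size);
    if (!p)
        throw std::bad_alloc{};
    return p;
}

void operator delete(void * ptr) noexcept
{
    std::puts("unsized operator delete");
    std::free(ptr);
}

void operator delete(void * ptr, std::size_t size) noexcept
{
    std::printf("sized operator delete, size = %zu\n", size);
    std::free(ptr);
}

int main()
{
    auto * x = new long{42};   // goes through the replaced operator new
    delete x;                  // with -fsized-deallocation this should pick the sized overload
}
```

Compiled without the flag, the same program should fall back to the unsized overload; the flag only changes which replacement the delete-expression targets.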
4 LICENSE
@@ -1,4 +1,4 @@
-Copyright 2016-2022 ClickHouse, Inc.
+Copyright 2016-2023 ClickHouse, Inc.

 Apache License
 Version 2.0, January 2004
@@ -188,7 +188,7 @@ Copyright 2016-2022 ClickHouse, Inc.
 same "printed page" as the copyright notice for easier
 identification within third-party archives.

-Copyright 2016-2022 ClickHouse, Inc.
+Copyright 2016-2023 ClickHouse, Inc.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -7,34 +7,92 @@
 # How to install Ninja on Ubuntu:
 # sudo apt-get install ninja-build

-# CLion does not support Ninja
-# You can add your vote on CLion task tracker:
-# https://youtrack.jetbrains.com/issue/CPP-2659
-# https://youtrack.jetbrains.com/issue/CPP-870

-if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
+if (NOT DEFINED ENV{XCODE_IDE})
 find_program(NINJA_PATH ninja)
 if (NINJA_PATH)
-set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
+set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "")
 endif ()
 endif()

+# Check if environment is polluted.
+if (NOT "$ENV{CFLAGS}" STREQUAL ""
+OR NOT "$ENV{CXXFLAGS}" STREQUAL ""
+OR NOT "$ENV{LDFLAGS}" STREQUAL ""
+OR CMAKE_C_FLAGS OR CMAKE_CXX_FLAGS OR CMAKE_EXE_LINKER_FLAGS OR CMAKE_MODULE_LINKER_FLAGS
+OR CMAKE_C_FLAGS_INIT OR CMAKE_CXX_FLAGS_INIT OR CMAKE_EXE_LINKER_FLAGS_INIT OR CMAKE_MODULE_LINKER_FLAGS_INIT)
+
+# if $ENV
+message("CFLAGS: $ENV{CFLAGS}")
+message("CXXFLAGS: $ENV{CXXFLAGS}")
+message("LDFLAGS: $ENV{LDFLAGS}")
+# if *_FLAGS
+message("CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}")
+message("CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
+message("CMAKE_EXE_LINKER_FLAGS: ${CMAKE_EXE_LINKER_FLAGS}")
+message("CMAKE_SHARED_LINKER_FLAGS: ${CMAKE_SHARED_LINKER_FLAGS}")
+message("CMAKE_MODULE_LINKER_FLAGS: ${CMAKE_MODULE_LINKER_FLAGS}")
+# if *_FLAGS_INIT
+message("CMAKE_C_FLAGS_INIT: ${CMAKE_C_FLAGS_INIT}")
+message("CMAKE_CXX_FLAGS_INIT: ${CMAKE_CXX_FLAGS_INIT}")
+message("CMAKE_EXE_LINKER_FLAGS_INIT: ${CMAKE_EXE_LINKER_FLAGS_INIT}")
+message("CMAKE_MODULE_LINKER_FLAGS_INIT: ${CMAKE_MODULE_LINKER_FLAGS_INIT}")
+
+message(FATAL_ERROR "
+Some of the variables like CFLAGS, CXXFLAGS, LDFLAGS are not empty.
+It is not possible to build ClickHouse with custom flags.
+These variables can be set up by previous invocation of some other build tools.
+You should cleanup these variables and start over again.
+
+Run the `env` command to check the details.
+You will also need to remove the contents of the build directory.
+
+Note: if you don't like this behavior, you can manually edit the cmake files, but please don't complain to developers.")
+endif()

 # Default toolchain - this is needed to avoid dependency on OS files.
 execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
 execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)

+# By default, prefer clang on Linux
+# But note, that you still may change the compiler with -DCMAKE_C_COMPILER/-DCMAKE_CXX_COMPILER.
+if (OS MATCHES "Linux"
+AND "$ENV{CC}" STREQUAL ""
+AND "$ENV{CXX}" STREQUAL ""
+AND NOT DEFINED CMAKE_C_COMPILER
+AND NOT DEFINED CMAKE_CXX_COMPILER)
+find_program(CLANG_PATH clang)
+if (CLANG_PATH)
+set(CMAKE_C_COMPILER "clang" CACHE INTERNAL "")
+endif()
+
+find_program(CLANG_CXX_PATH clang++)
+if (CLANG_CXX_PATH)
+set(CMAKE_CXX_COMPILER "clang++" CACHE INTERNAL "")
+endif()
+endif()

 if (OS MATCHES "Linux"
 AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
 AND NOT DISABLE_HERMETIC_BUILD
-AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*"))
+AND ("$ENV{CC}" MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*"))

 if (ARCH MATCHES "amd64|x86_64")
-set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
+# NOTE: right now musl is not ready, since unwind is too slow with it
+#
+# FWIW the following had been tried:
+# - update musl
+# - compile musl with debug
+# - compile musl with debug and -fasynchronous-unwind-tables
+#
+# But none of this changes anything so far.
+set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "")
 elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
-set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
+set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "")
 elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
-set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
+set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "")
+elseif (ARCH MATCHES "^(s390x.*|S390X.*)")
+set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-s390x.cmake" CACHE INTERNAL "")
 else ()
 message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
 endif ()
55 README.md
@@ -1,15 +1,56 @@
-[![ClickHouse — open source distributed column-oriented DBMS](https://github.com/ClickHouse/ClickHouse/raw/master/website/images/logo-400x240.png)](https://clickhouse.com)
-
-ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.
+<div align=center>
+
+[![Website](https://img.shields.io/website?up_message=AVAILABLE&down_message=DOWN&url=https%3A%2F%2Fclickhouse.com&style=for-the-badge)](https://clickhouse.com)
+[![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202.0-blueviolet?style=for-the-badge)](https://www.apache.org/licenses/LICENSE-2.0)
+
+<picture align=center>
+<source media="(prefers-color-scheme: dark)" srcset="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/4ef9c104-2d3f-4646-b186-507358d2fe28">
+<source media="(prefers-color-scheme: light)" srcset="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/b001dc7b-5a45-4dcd-9275-e03beb7f9177">
+<img alt="The ClickHouse company logo." src="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/b001dc7b-5a45-4dcd-9275-e03beb7f9177">
+</picture>
+
+<h4>ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.</h4>
+
+</div>
+
+## How To Install (Linux, macOS, FreeBSD)
+```
+curl https://clickhouse.com/ | sh
+```

 ## Useful Links

 * [Official website](https://clickhouse.com/) has a quick high-level overview of ClickHouse on the main page.
+* [ClickHouse Cloud](https://clickhouse.cloud) ClickHouse as a service, built by the creators and maintainers.
 * [Tutorial](https://clickhouse.com/docs/en/getting_started/tutorial/) shows how to set up and query a small ClickHouse cluster.
 * [Documentation](https://clickhouse.com/docs/en/) provides more in-depth information.
 * [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
-* [Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-rxm3rdrk-lIUmhLC3V8WTaL0TGxsOmg) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
+* [Slack](https://clickhouse.com/slack) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
-* [Blog](https://clickhouse.com/blog/en/) contains various ClickHouse-related articles, as well as announcements and reports about events.
+* [Blog](https://clickhouse.com/blog/) contains various ClickHouse-related articles, as well as announcements and reports about events.
-* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlight and navigation.
-* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlight, powered by github.dev.
+* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlighting, powered by github.dev.
+* [Static Analysis (SonarCloud)](https://sonarcloud.io/project/issues?resolved=false&id=ClickHouse_ClickHouse) proposes C++ quality improvements.
-* [Contacts](https://clickhouse.com/company/#contact) can help to get your questions answered if there are any.
+* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.
+
+## Upcoming Events
+
+* [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/296334923/) - Nov 14
+* [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/296334976/) - Nov 15
+* [**ClickHouse Meetup in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/296488501/) - Nov 30
+* [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/296488779/) - Dec 11
+* [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/296488840/) - Dec 12
+
+Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
+
+## Recent Recordings
+* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
+* **Recording available**: [**v23.10 Release Webinar**](https://www.youtube.com/watch?v=PGQS6uPb970) All the features of 23.10, one convenient video! Watch it now!
+* **All release webinar recordings**: [YouTube playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3jAlSy1JxyP8zluvXaN3nxU)
+
+## Interested in joining ClickHouse and making it your full-time job?
+
+We are a globally diverse and distributed team, united behind a common goal of creating industry-leading, real-time analytics. Here, you will have an opportunity to solve some of the most cutting-edge technical challenges and have direct ownership of your work and vision. If you are a contributor by nature, a thinker and a doer - we’ll definitely click!
+
+Check out our **current openings** here: https://clickhouse.com/company/careers
+
+Can't find what you are looking for, but want to let us know you are interested in joining ClickHouse? Email careers@clickhouse.com!
48 SECURITY.md
@@ -1,3 +1,7 @@
+<!--
+the file is autogenerated by utils/security-generator/generate_security.py
+-->

 # Security Policy

 ## Security Announcements
@@ -7,33 +11,30 @@ Security fixes will be announced by posting them in the [security changelog](htt

 The following versions of ClickHouse server are currently being supported with security updates:

 | Version | Supported |
-| ------- | ------------------ |
-| 1.x | :x: |
-| 18.x | :x: |
-| 19.x | :x: |
-| 20.x | :x: |
-| 21.1 | :x: |
-| 21.2 | :x: |
-| 21.3 | :x: |
-| 21.4 | :x: |
-| 21.5 | :x: |
-| 21.6 | :x: |
-| 21.7 | :x: |
-| 21.8 | ✅ |
-| 21.9 | :x: |
-| 21.10 | :x: |
-| 21.11 | :x: |
-| 21.12 | :x: |
-| 22.1 | ✅ |
-| 22.2 | ✅ |
-| 22.3 | ✅ |
+|:-|:-|
+| 23.10 | ✔️ |
+| 23.9 | ✔️ |
+| 23.8 | ✔️ |
+| 23.7 | ❌ |
+| 23.6 | ❌ |
+| 23.5 | ❌ |
+| 23.4 | ❌ |
+| 23.3 | ✔️ |
+| 23.2 | ❌ |
+| 23.1 | ❌ |
+| 22.* | ❌ |
+| 21.* | ❌ |
+| 20.* | ❌ |
+| 19.* | ❌ |
+| 18.* | ❌ |
+| 1.* | ❌ |

 ## Reporting a Vulnerability

 We're extremely grateful for security researchers and users that report vulnerabilities to the ClickHouse Open Source Community. All reports are thoroughly investigated by developers.

-To report a potential vulnerability in ClickHouse please send the details about it to [security@clickhouse.com](mailto:security@clickhouse.com).
+To report a potential vulnerability in ClickHouse please send the details about it to [security@clickhouse.com](mailto:security@clickhouse.com). We do not offer any financial rewards for reporting issues to us using this method. Alternatively, you can also submit your findings through our public bug bounty program hosted by [Bugcrowd](https://bugcrowd.com/clickhouse) and be rewarded for it as per the program scope and rules of engagement.

 ### When Should I Report a Vulnerability?

@@ -53,6 +54,5 @@ As the security issue moves from triage, to identified fix, to release planning

 ## Public Disclosure Timing

-A public disclosure date is negotiated by the ClickHouse maintainers and the bug submitter. We prefer to fully disclose the bug as soon as possible once a user mitigation is available. It is reasonable to delay disclosure when the bug or the fix is not yet fully understood, the solution is not well-tested, or for vendor coordination. The timeframe for disclosure is from immediate (especially if it's already publicly known) to 90 days. For a vulnerability with a straightforward mitigation, we expect report date to disclosure date to be on the order of 7 days.
+A public disclosure date is negotiated by the ClickHouse maintainers and the bug submitter. We prefer to fully disclose the bug as soon as possible once a user mitigation is available. It is reasonable to delay disclosure when the bug or the fix is not yet fully understood, the solution is not well-tested, or for vendor coordination. The timeframe for disclosure is from immediate (especially if it's already publicly known) to 90 days. For a vulnerability with a straightforward mitigation, we expect the report date to disclosure date to be on the order of 7 days.
@@ -1,11 +1,5 @@
-if (USE_CLANG_TIDY)
-set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}")
-endif ()

 add_subdirectory (base)
-add_subdirectory (daemon)
-add_subdirectory (loggers)
 add_subdirectory (pcg-random)
+add_subdirectory (poco)
 add_subdirectory (widechar_width)
 add_subdirectory (readpassphrase)
-add_subdirectory (bridge)
@@ -10,7 +10,7 @@
 #include <base/MoveOrCopyIfThrow.h>

 /** Pool for limited size objects that cannot be used from different threads simultaneously.
-* The main use case is to have fixed size of objects that can be reused in difference threads during their lifetime
+* The main use case is to have fixed size of objects that can be reused in different threads during their lifetime
 * and have to be initialized on demand.
 * Two main properties of pool are allocated objects size and borrowed objects size.
 * Allocated objects size is size of objects that are currently allocated by the pool.
@@ -89,7 +89,7 @@ public:
 inline void returnObject(T && object_to_return)
 {
 {
-std::lock_guard<std::mutex> lock(objects_mutex);
+std::lock_guard lock(objects_mutex);

 objects.emplace_back(std::move(object_to_return));
 --borrowed_objects_size;
@@ -107,14 +107,14 @@ public:
 /// Allocated objects size by the pool. If allocatedObjectsSize == maxSize then pool is full.
 inline size_t allocatedObjectsSize() const
 {
-std::unique_lock<std::mutex> lock(objects_mutex);
+std::lock_guard lock(objects_mutex);
 return allocated_objects_size;
 }

 /// Returns allocatedObjectsSize == maxSize
 inline bool isFull() const
 {
-std::unique_lock<std::mutex> lock(objects_mutex);
+std::lock_guard lock(objects_mutex);
 return allocated_objects_size == max_size;
 }

@@ -122,7 +122,7 @@ public:
 /// Then client will wait during borrowObject function call.
 inline size_t borrowedObjectsSize() const
 {
-std::unique_lock<std::mutex> lock(objects_mutex);
+std::lock_guard lock(objects_mutex);
 return borrowed_objects_size;
 }
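The hunks above replace `std::unique_lock<std::mutex>` with `std::lock_guard` and rely on class template argument deduction to drop the explicit `<std::mutex>`. A self-contained sketch of the same pattern (a toy counter class with made-up names, not the real pool):

```cpp
// std::lock_guard is the lighter choice when the lock is held for the whole scope and
// never released early, moved, or handed to a condition variable; CTAD (C++17) deduces
// the mutex type. Compile with e.g. clang++ -std=c++20 lock_guard_demo.cpp
#include <cstddef>
#include <iostream>
#include <mutex>
#include <vector>

class Counter
{
public:
    void add(int value)
    {
        std::lock_guard lock(mutex);   // deduced as std::lock_guard<std::mutex>
        values.push_back(value);
    }

    std::size_t size() const
    {
        std::lock_guard lock(mutex);   // works in const methods because the mutex is mutable
        return values.size();
    }

private:
    std::vector<int> values;
    mutable std::mutex mutex;
};

int main()
{
    Counter counter;
    counter.add(1);
    counter.add(2);
    std::cout << counter.size() << '\n';   // prints 2
}
```

`std::unique_lock` keeps the extra flexibility (deferred locking, early unlock, moving the lock) at a small cost, which is why the simpler guard is preferred when that flexibility is unused.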
@@ -1,3 +1,13 @@
+add_compile_options($<$<OR:$<COMPILE_LANGUAGE:C>,$<COMPILE_LANGUAGE:CXX>>:${COVERAGE_FLAGS}>)
+
+if (USE_CLANG_TIDY)
+set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}")
+endif ()
+
+# TODO: Remove this. We like to compile with C++23 (set by top-level CMakeLists) but Clang crashes with our libcxx
+# when instantiated from JSON.cpp. Try again when libcxx(abi) and Clang are upgraded to 16.
+set (CMAKE_CXX_STANDARD 20)
+
 set (SRCS
 argsToConfig.cpp
 coverage.cpp
@@ -8,23 +18,18 @@ set (SRCS
 getPageSize.cpp
 getThreadId.cpp
 JSON.cpp
-LineReader.cpp
 mremap.cpp
 phdr_cache.cpp
 preciseExp10.cpp
-setTerminalEcho.cpp
 shift10.cpp
 sleep.cpp
 terminalColors.cpp
 errnoToString.cpp
 StringRef.cpp
 safeExit.cpp
+throwError.cpp
 )

-if (ENABLE_REPLXX)
-list (APPEND SRCS ReplxxLineReader.cpp)
-endif ()

 if (USE_DEBUG_HELPERS)
 get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES)
 # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc.
@@ -42,12 +47,12 @@ else ()
 target_compile_definitions(common PUBLIC WITH_COVERAGE=0)
 endif ()

-target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")
-
-if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES)
-target_link_libraries(common PUBLIC -Wl,-U,_inside_main)
+if (TARGET ch_contrib::crc32_s390x)
+target_link_libraries(common PUBLIC ch_contrib::crc32_s390x)
 endif()

+target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")
+
 target_link_libraries (common
 PUBLIC
 ch_contrib::cityhash
@@ -1,75 +0,0 @@
-#pragma once
-
-#include <map>
-#include <tuple>
-#include <mutex>
-#include "FnTraits.h"
-
-/**
-* Caching proxy for a functor that decays to a pointer-to-function.
-* Saves pairs (func args, func result on args).
-* Cache size is unlimited. Cache items are evicted only on manual drop.
-* Invocation/update is O(log(saved cache values)).
-*
-* See Common/tests/cached_fn.cpp for examples.
-*/
-template <auto * Func>
-struct CachedFn
-{
-private:
-using Traits = FnTraits<decltype(Func)>;
-using DecayedArgs = TypeListMap<std::decay_t, typename Traits::Args>;
-using Key = TypeListChangeRoot<std::tuple, DecayedArgs>;
-using Result = typename Traits::Ret;
-
-std::map<Key, Result> cache; // Can't use hashmap as tuples are unhashable by default
-mutable std::mutex mutex;
-
-public:
-template <class ...Args>
-Result operator()(Args && ...args)
-{
-Key key{std::forward<Args>(args)...};
-
-{
-std::lock_guard lock(mutex);
-
-if (auto it = cache.find(key); it != cache.end())
-return it->second;
-}
-
-Result res = std::apply(Func, key);
-
-{
-std::lock_guard lock(mutex);
-cache.emplace(std::move(key), res);
-}
-
-return res;
-}
-
-template <class ...Args>
-void update(Args && ...args)
-{
-Key key{std::forward<Args>(args)...};
-Result res = std::apply(Func, key);
-
-{
-std::lock_guard lock(mutex);
-// TODO Can't use emplace(std::move(key), ..), causes test_host_ip_change errors.
-cache[key] = std::move(res);
-}
-}
-
-size_t size() const
-{
-std::lock_guard lock(mutex);
-return cache.size();
-}
-
-void drop()
-{
-std::lock_guard lock(mutex);
-cache.clear();
-}
-};
@@ -1,5 +1,6 @@
 #pragma once
 #include <base/extended_types.h>
+#include <base/Decimal_fwd.h>

 #if !defined(NO_SANITIZE_UNDEFINED)
 #if defined(__clang__)
@@ -19,23 +20,6 @@ using Decimal64 = Decimal<Int64>;
 using Decimal128 = Decimal<Int128>;
 using Decimal256 = Decimal<Int256>;

-template <class T>
-concept is_decimal =
-std::is_same_v<T, Decimal32>
-|| std::is_same_v<T, Decimal64>
-|| std::is_same_v<T, Decimal128>
-|| std::is_same_v<T, Decimal256>
-|| std::is_same_v<T, DateTime64>;
-
-template <class T>
-concept is_over_big_int =
-std::is_same_v<T, Int128>
-|| std::is_same_v<T, UInt128>
-|| std::is_same_v<T, Int256>
-|| std::is_same_v<T, UInt256>
-|| std::is_same_v<T, Decimal128>
-|| std::is_same_v<T, Decimal256>;

 template <class T> struct NativeTypeT { using Type = T; };
 template <is_decimal T> struct NativeTypeT<T> { using Type = typename T::NativeType; };
 template <class T> using NativeType = typename NativeTypeT<T>::Type;
@@ -49,18 +33,18 @@ struct Decimal
 using NativeType = T;

 constexpr Decimal() = default;
-constexpr Decimal(Decimal<T> &&) = default;
+constexpr Decimal(Decimal<T> &&) noexcept = default;
 constexpr Decimal(const Decimal<T> &) = default;

-constexpr Decimal(const T & value_): value(value_) {}
+constexpr Decimal(const T & value_): value(value_) {} // NOLINT(google-explicit-constructor)

 template <typename U>
-constexpr Decimal(const Decimal<U> & x): value(x.value) {}
+constexpr Decimal(const Decimal<U> & x): value(x.value) {} // NOLINT(google-explicit-constructor)

-constexpr Decimal<T> & operator = (Decimal<T> &&) = default;
+constexpr Decimal<T> & operator=(Decimal<T> &&) noexcept = default;
 constexpr Decimal<T> & operator = (const Decimal<T> &) = default;

-constexpr operator T () const { return value; }
+constexpr operator T () const { return value; } // NOLINT(google-explicit-constructor)

 template <typename U>
 constexpr U convertTo() const
@@ -111,7 +95,7 @@ public:
 using Base::Base;
 using NativeType = Base::NativeType;

-constexpr DateTime64(const Base & v): Base(v) {}
+constexpr DateTime64(const Base & v): Base(v) {} // NOLINT(google-explicit-constructor)
 };
 }
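The hunks above mark the defaulted move operations `noexcept` and tag the intentionally implicit constructors with `// NOLINT(google-explicit-constructor)`. The toy wrapper below (made-up `Wrapper` type, not the real `Decimal`) sketches why the `noexcept` annotation is worth stating explicitly: standard containers only move elements during reallocation when the move constructor is known not to throw, and the `static_assert`s make that guarantee part of the interface.

```cpp
// Minimal sketch of noexcept defaulted moves plus an intentionally implicit
// converting constructor, in the spirit of the Decimal changes above.
#include <type_traits>

template <typename T>
struct Wrapper
{
    constexpr Wrapper() = default;
    constexpr Wrapper(Wrapper &&) noexcept = default;
    constexpr Wrapper(const Wrapper &) = default;
    constexpr Wrapper(const T & value_) : value(value_) {}   // implicit by design, like Decimal(const T &)

    constexpr Wrapper & operator=(Wrapper &&) noexcept = default;
    constexpr Wrapper & operator=(const Wrapper &) = default;

    T value{};
};

// If a member ever makes the defaulted move potentially throwing, these fail at compile time.
static_assert(std::is_nothrow_move_constructible_v<Wrapper<int>>);
static_assert(std::is_nothrow_move_assignable_v<Wrapper<int>>);

int main()
{
    Wrapper<int> w = 42;   // implicit conversion from the wrapped type
    return w.value == 42 ? 0 : 1;
}
```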
46 base/base/Decimal_fwd.h (new file)
@@ -0,0 +1,46 @@
+#pragma once
+
+#include <base/types.h>
+
+namespace wide
+{
+
+template <size_t Bits, typename Signed>
+class integer;
+
+}
+
+using Int128 = wide::integer<128, signed>;
+using UInt128 = wide::integer<128, unsigned>;
+using Int256 = wide::integer<256, signed>;
+using UInt256 = wide::integer<256, unsigned>;
+
+namespace DB
+{
+
+template <class> struct Decimal;
+
+using Decimal32 = Decimal<Int32>;
+using Decimal64 = Decimal<Int64>;
+using Decimal128 = Decimal<Int128>;
+using Decimal256 = Decimal<Int256>;
+
+class DateTime64;
+
+template <class T>
+concept is_decimal =
+std::is_same_v<T, Decimal32>
+|| std::is_same_v<T, Decimal64>
+|| std::is_same_v<T, Decimal128>
+|| std::is_same_v<T, Decimal256>
+|| std::is_same_v<T, DateTime64>;
+
+template <class T>
+concept is_over_big_int =
+std::is_same_v<T, Int128>
+|| std::is_same_v<T, UInt128>
+|| std::is_same_v<T, Int256>
+|| std::is_same_v<T, UInt256>
+|| std::is_same_v<T, Decimal128>
+|| std::is_same_v<T, Decimal256>;
+}
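The new Decimal_fwd.h is a forward-declaration header: translation units that only need the `Decimal` aliases or the `is_decimal` / `is_over_big_int` concepts no longer have to pull in the full class definitions. Below is a minimal self-contained sketch of the same pattern with made-up names (`Value`, `Value32`, `Value64`, `is_value` are assumptions for illustration, not ClickHouse types); the three commented regions stand in for what would normally be a fwd header, the full header, and user code.

```cpp
// C++20. Compile with e.g. clang++ -std=c++20 fwd_concept_demo.cpp
#include <type_traits>

// --- "value_fwd.h": forward declaration, aliases, and a concept; no definitions ---
template <class> struct Value;              // forward declaration only

using Value32 = Value<int>;
using Value64 = Value<long long>;

template <class T>
concept is_value = std::is_same_v<T, Value32> || std::is_same_v<T, Value64>;

// --- "value.h": the full definition, needed only by code that touches members ----
template <class T> struct Value { T payload{}; };

// --- user code: the constraint works against the forward declarations ------------
template <is_value V>
auto payload_of(const V & v) { return v.payload; }

int main()
{
    Value64 v{.payload = 7};
    return payload_of(v) == 7 ? 0 : 1;
}
```

The point of the split is compile-time isolation: `std::is_same_v` and the concept only need the declared names, so a header like the constraint section can be included widely without dragging in the heavy definitions.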
@@ -36,14 +36,14 @@ struct DecomposedFloat
 {
 using Traits = FloatTraits<T>;

-DecomposedFloat(T x)
+explicit DecomposedFloat(T x)
 {
 memcpy(&x_uint, &x, sizeof(x));
 }

 typename Traits::UInt x_uint;

-bool is_negative() const
+bool isNegative() const
 {
 return x_uint >> (Traits::bits - 1);
 }
@@ -53,7 +53,7 @@ struct DecomposedFloat
 {
 return (exponent() == 0 && mantissa() == 0)
 ? 0
-: (is_negative()
+: (isNegative()
 ? -1
 : 1);
 }
@@ -63,7 +63,7 @@ struct DecomposedFloat
 return (x_uint >> (Traits::mantissa_bits)) & (((1ull << (Traits::exponent_bits + 1)) - 1) >> 1);
 }

-int16_t normalized_exponent() const
+int16_t normalizedExponent() const
 {
 return int16_t(exponent()) - ((1ull << (Traits::exponent_bits - 1)) - 1);
 }
@@ -73,20 +73,20 @@ struct DecomposedFloat
 return x_uint & ((1ull << Traits::mantissa_bits) - 1);
 }

-int64_t mantissa_with_sign() const
+int64_t mantissaWithSign() const
 {
-return is_negative() ? -mantissa() : mantissa();
+return isNegative() ? -mantissa() : mantissa();
 }

 /// NOTE Probably floating point instructions can be better.
-bool is_integer_in_representable_range() const
+bool isIntegerInRepresentableRange() const
 {
 return x_uint == 0
-|| (normalized_exponent() >= 0 /// The number is not less than one
+|| (normalizedExponent() >= 0 /// The number is not less than one
 /// The number is inside the range where every integer has exact representation in float
-&& normalized_exponent() <= static_cast<int16_t>(Traits::mantissa_bits)
+&& normalizedExponent() <= static_cast<int16_t>(Traits::mantissa_bits)
 /// After multiplying by 2^exp, the fractional part becomes zero, means the number is integer
-&& ((mantissa() & ((1ULL << (Traits::mantissa_bits - normalized_exponent())) - 1)) == 0));
+&& ((mantissa() & ((1ULL << (Traits::mantissa_bits - normalizedExponent())) - 1)) == 0));
 }

@@ -102,15 +102,15 @@ struct DecomposedFloat
 return sign();

 /// Different signs
-if (is_negative() && rhs > 0)
+if (isNegative() && rhs > 0)
 return -1;
-if (!is_negative() && rhs < 0)
+if (!isNegative() && rhs < 0)
 return 1;

 /// Fractional number with magnitude less than one
-if (normalized_exponent() < 0)
+if (normalizedExponent() < 0)
 {
-if (!is_negative())
+if (!isNegative())
 return rhs > 0 ? -1 : 1;
 else
 return rhs >= 0 ? -1 : 1;
@@ -121,11 +121,11 @@ struct DecomposedFloat
 {
 if (rhs == std::numeric_limits<Int>::lowest())
 {
-assert(is_negative());
+assert(isNegative());

-if (normalized_exponent() < static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
+if (normalizedExponent() < static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
 return 1;
-if (normalized_exponent() > static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
+if (normalizedExponent() > static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
 return -1;

 if (mantissa() == 0)
@@ -136,44 +136,44 @@ struct DecomposedFloat
 }

 /// Too large number: abs(float) > abs(rhs). Also the case with infinities and NaN.
-if (normalized_exponent() >= static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
+if (normalizedExponent() >= static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>))
-return is_negative() ? -1 : 1;
+return isNegative() ? -1 : 1;

 using UInt = std::conditional_t<(sizeof(Int) > sizeof(typename Traits::UInt)), make_unsigned_t<Int>, typename Traits::UInt>;
 UInt uint_rhs = rhs < 0 ? -rhs : rhs;

 /// Smaller octave: abs(rhs) < abs(float)
 /// FYI, TIL: octave is also called "binade", https://en.wikipedia.org/wiki/Binade
-if (uint_rhs < (static_cast<UInt>(1) << normalized_exponent()))
+if (uint_rhs < (static_cast<UInt>(1) << normalizedExponent()))
-return is_negative() ? -1 : 1;
+return isNegative() ? -1 : 1;

 /// Larger octave: abs(rhs) > abs(float)
-if (normalized_exponent() + 1 < static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>)
+if (normalizedExponent() + 1 < static_cast<int16_t>(8 * sizeof(Int) - is_signed_v<Int>)
-&& uint_rhs >= (static_cast<UInt>(1) << (normalized_exponent() + 1)))
+&& uint_rhs >= (static_cast<UInt>(1) << (normalizedExponent() + 1)))
-return is_negative() ? 1 : -1;
+return isNegative() ? 1 : -1;

 /// The same octave
-/// uint_rhs == 2 ^ normalized_exponent + mantissa * 2 ^ (normalized_exponent - mantissa_bits)
+/// uint_rhs == 2 ^ normalizedExponent + mantissa * 2 ^ (normalizedExponent - mantissa_bits)

-bool large_and_always_integer = normalized_exponent() >= static_cast<int16_t>(Traits::mantissa_bits);
+bool large_and_always_integer = normalizedExponent() >= static_cast<int16_t>(Traits::mantissa_bits);

 UInt a = large_and_always_integer
-? static_cast<UInt>(mantissa()) << (normalized_exponent() - Traits::mantissa_bits)
+? static_cast<UInt>(mantissa()) << (normalizedExponent() - Traits::mantissa_bits)
-: static_cast<UInt>(mantissa()) >> (Traits::mantissa_bits - normalized_exponent());
+: static_cast<UInt>(mantissa()) >> (Traits::mantissa_bits - normalizedExponent());

-UInt b = uint_rhs - (static_cast<UInt>(1) << normalized_exponent());
+UInt b = uint_rhs - (static_cast<UInt>(1) << normalizedExponent());

 if (a < b)
-return is_negative() ? 1 : -1;
+return isNegative() ? 1 : -1;
 if (a > b)
-return is_negative() ? -1 : 1;
+return isNegative() ? -1 : 1;

 /// Float has no fractional part means that the numbers are equal.
-if (large_and_always_integer || (mantissa() & ((1ULL << (Traits::mantissa_bits - normalized_exponent())) - 1)) == 0)
+if (large_and_always_integer || (mantissa() & ((1ULL << (Traits::mantissa_bits - normalizedExponent())) - 1)) == 0)
 return 0;
 else
 /// Float has fractional part means its abs value is larger.
-return is_negative() ? -1 : 1;
+return isNegative() ? -1 : 1;
 }
@ -3,6 +3,7 @@
|
|||||||
#include <magic_enum.hpp>
|
#include <magic_enum.hpp>
|
||||||
#include <fmt/format.h>
|
#include <fmt/format.h>
|
||||||
|
|
||||||
|
|
||||||
template <class T> concept is_enum = std::is_enum_v<T>;
|
template <class T> concept is_enum = std::is_enum_v<T>;
|
||||||
|
|
||||||
namespace detail
|
namespace detail
|
||||||
|
77
base/base/IPv4andIPv6.h
Normal file
77
base/base/IPv4andIPv6.h
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <base/strong_typedef.h>
|
||||||
|
#include <base/extended_types.h>
|
||||||
|
#include <Common/formatIPv6.h>
|
||||||
|
#include <Common/memcmpSmall.h>
|
||||||
|
|
||||||
|
namespace DB
|
||||||
|
{
|
||||||
|
|
||||||
|
struct IPv4 : StrongTypedef<UInt32, struct IPv4Tag>
|
||||||
|
{
|
||||||
|
using StrongTypedef::StrongTypedef;
|
||||||
|
using StrongTypedef::operator=;
|
||||||
|
constexpr explicit IPv4(UInt64 value): StrongTypedef(static_cast<UnderlyingType>(value)) {}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
|
||||||
|
{
|
||||||
|
using StrongTypedef::StrongTypedef;
|
||||||
|
using StrongTypedef::operator=;
|
||||||
|
|
||||||
|
bool operator<(const IPv6 & rhs) const
|
||||||
|
{
|
||||||
|
return
|
||||||
|
memcmp16(
|
||||||
|
reinterpret_cast<const unsigned char *>(toUnderType().items),
|
||||||
|
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
|
||||||
|
) < 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator>(const IPv6 & rhs) const
|
||||||
|
{
|
||||||
|
return
|
||||||
|
memcmp16(
|
||||||
|
reinterpret_cast<const unsigned char *>(toUnderType().items),
|
||||||
|
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
|
||||||
|
) > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator==(const IPv6 & rhs) const
|
||||||
|
{
|
||||||
|
return
|
||||||
|
memcmp16(
|
||||||
|
reinterpret_cast<const unsigned char *>(toUnderType().items),
|
||||||
|
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
|
||||||
|
) == 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator<=(const IPv6 & rhs) const { return !operator>(rhs); }
|
||||||
|
bool operator>=(const IPv6 & rhs) const { return !operator<(rhs); }
|
||||||
|
bool operator!=(const IPv6 & rhs) const { return !operator==(rhs); }
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace std
|
||||||
|
{
|
||||||
|
/// For historical reasons we hash IPv6 as a FixedString(16)
|
||||||
|
template <>
|
||||||
|
struct hash<DB::IPv6>
|
||||||
|
{
|
||||||
|
size_t operator()(const DB::IPv6 & x) const
|
||||||
|
{
|
||||||
|
return std::hash<std::string_view>{}(std::string_view(reinterpret_cast<const char*>(&x.toUnderType()), IPV6_BINARY_LENGTH));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
template <>
|
||||||
|
struct hash<DB::IPv4>
|
||||||
|
{
|
||||||
|
size_t operator()(const DB::IPv4 & x) const
|
||||||
|
{
|
||||||
|
return std::hash<DB::IPv4::UnderlyingType>()(x.toUnderType());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
@ -1,5 +1,5 @@
|
|||||||
#include <string>
|
#include <string>
|
||||||
#include <string.h>
|
#include <cstring>
|
||||||
|
|
||||||
#include <Poco/UTF8Encoding.h>
|
#include <Poco/UTF8Encoding.h>
|
||||||
#include <Poco/NumberParser.h>
|
#include <Poco/NumberParser.h>
|
||||||
@ -7,15 +7,20 @@
|
|||||||
#include <base/find_symbols.h>
|
#include <base/find_symbols.h>
|
||||||
#include <base/preciseExp10.h>
|
#include <base/preciseExp10.h>
|
||||||
|
|
||||||
#include <iostream>
|
|
||||||
|
|
||||||
#define JSON_MAX_DEPTH 100
|
#define JSON_MAX_DEPTH 100
|
||||||
|
|
||||||
|
|
||||||
POCO_IMPLEMENT_EXCEPTION(JSONException, Poco::Exception, "JSONException")
|
#ifdef __clang__
|
||||||
|
# pragma clang diagnostic push
|
||||||
|
# pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||||
|
#endif
|
||||||
|
POCO_IMPLEMENT_EXCEPTION(JSONException, Poco::Exception, "JSONException") // NOLINT(cert-err60-cpp, modernize-use-noexcept, hicpp-use-noexcept)
|
||||||
|
#ifdef __clang__
|
||||||
|
# pragma clang diagnostic pop
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
/// Прочитать беззнаковое целое в простом формате из не-0-terminated строки.
|
/// Read unsigned integer in a simple form from a non-0-terminated string.
|
||||||
static UInt64 readUIntText(const char * buf, const char * end)
|
static UInt64 readUIntText(const char * buf, const char * end)
|
||||||
{
|
{
|
||||||
UInt64 x = 0;
|
UInt64 x = 0;
|
||||||
@ -52,7 +57,7 @@ static UInt64 readUIntText(const char * buf, const char * end)
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Прочитать знаковое целое в простом формате из не-0-terminated строки.
|
/// Read signed integer in a simple form from a non-0-terminated string.
|
||||||
static Int64 readIntText(const char * buf, const char * end)
|
static Int64 readIntText(const char * buf, const char * end)
|
||||||
{
|
{
|
||||||
bool negative = false;
|
bool negative = false;
|
||||||
@ -95,7 +100,7 @@ static Int64 readIntText(const char * buf, const char * end)
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Прочитать число с плавающей запятой в простом формате, с грубым округлением, из не-0-terminated строки.
|
/// Read floating point number in simple format, imprecisely, from a non-0-terminated string.
|
||||||
static double readFloatText(const char * buf, const char * end)
|
static double readFloatText(const char * buf, const char * end)
|
||||||
{
|
{
|
||||||
bool negative = false;
|
bool negative = false;
|
||||||
@ -144,8 +149,8 @@ static double readFloatText(const char * buf, const char * end)
|
|||||||
case 'E':
|
case 'E':
|
||||||
{
|
{
|
||||||
++buf;
|
++buf;
|
||||||
Int32 exponent = readIntText(buf, end);
|
auto exponent = readIntText(buf, end);
|
||||||
x *= preciseExp10(exponent);
|
x *= preciseExp10(static_cast<double>(exponent));
|
||||||
|
|
||||||
run = false;
|
run = false;
|
||||||
break;
|
break;
|
||||||
@ -200,7 +205,7 @@ JSON::ElementType JSON::getType() const
|
|||||||
return TYPE_NUMBER;
|
return TYPE_NUMBER;
|
||||||
case '"':
|
case '"':
|
||||||
{
|
{
|
||||||
/// Проверим - это просто строка или name-value pair
|
/// Is it a string or a name-value pair?
|
||||||
Pos after_string = skipString();
|
Pos after_string = skipString();
|
||||||
if (after_string < ptr_end && *after_string == ':')
|
if (after_string < ptr_end && *after_string == ':')
|
||||||
return TYPE_NAME_VALUE_PAIR;
|
return TYPE_NAME_VALUE_PAIR;
|
||||||
@ -222,15 +227,13 @@ void JSON::checkPos(Pos pos) const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipString() const
|
JSON::Pos JSON::skipString() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipString()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
Pos pos = ptr_begin;
|
Pos pos = ptr_begin;
|
||||||
checkPos(pos);
|
checkPos(pos);
|
||||||
if (*pos != '"')
|
if (*pos != '"')
|
||||||
throw JSONException(std::string("JSON: expected \", got ") + *pos);
|
throw JSONException(std::string("JSON: expected \", got ") + *pos);
|
||||||
++pos;
|
++pos;
|
||||||
|
|
||||||
/// fast path: находим следующую двойную кавычку. Если перед ней нет бэкслеша - значит это конец строки (при допущении корректности JSON).
|
/// fast path: find next double quote. If it is not escaped by backslash - then it's an end of string (assuming JSON is valid).
|
||||||
Pos closing_quote = reinterpret_cast<const char *>(memchr(reinterpret_cast<const void *>(pos), '\"', ptr_end - pos));
|
Pos closing_quote = reinterpret_cast<const char *>(memchr(reinterpret_cast<const void *>(pos), '\"', ptr_end - pos));
|
||||||
if (nullptr != closing_quote && closing_quote[-1] != '\\')
|
if (nullptr != closing_quote && closing_quote[-1] != '\\')
|
||||||
return closing_quote + 1;
|
return closing_quote + 1;
|
||||||
@ -262,8 +265,6 @@ JSON::Pos JSON::skipString() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipNumber() const
|
JSON::Pos JSON::skipNumber() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipNumber()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
Pos pos = ptr_begin;
|
Pos pos = ptr_begin;
|
||||||
|
|
||||||
checkPos(pos);
|
checkPos(pos);
|
||||||
@ -289,8 +290,6 @@ JSON::Pos JSON::skipNumber() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipBool() const
|
JSON::Pos JSON::skipBool() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipBool()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
Pos pos = ptr_begin;
|
Pos pos = ptr_begin;
|
||||||
checkPos(pos);
|
checkPos(pos);
|
||||||
|
|
||||||
@ -307,16 +306,12 @@ JSON::Pos JSON::skipBool() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipNull() const
|
JSON::Pos JSON::skipNull() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipNull()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
return ptr_begin + 4;
|
return ptr_begin + 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
JSON::Pos JSON::skipNameValuePair() const
|
JSON::Pos JSON::skipNameValuePair() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipNameValuePair()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
Pos pos = skipString();
|
Pos pos = skipString();
|
||||||
checkPos(pos);
|
checkPos(pos);
|
||||||
|
|
||||||
@ -331,8 +326,6 @@ JSON::Pos JSON::skipNameValuePair() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipArray() const
|
JSON::Pos JSON::skipArray() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipArray()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
if (!isArray())
|
if (!isArray())
|
||||||
throw JSONException("JSON: expected [");
|
throw JSONException("JSON: expected [");
|
||||||
Pos pos = ptr_begin;
|
Pos pos = ptr_begin;
|
||||||
@ -363,8 +356,6 @@ JSON::Pos JSON::skipArray() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipObject() const
|
JSON::Pos JSON::skipObject() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipObject()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
if (!isObject())
|
if (!isObject())
|
||||||
throw JSONException("JSON: expected {");
|
throw JSONException("JSON: expected {");
|
||||||
Pos pos = ptr_begin;
|
Pos pos = ptr_begin;
|
||||||
@ -395,8 +386,6 @@ JSON::Pos JSON::skipObject() const
|
|||||||
|
|
||||||
JSON::Pos JSON::skipElement() const
|
JSON::Pos JSON::skipElement() const
|
||||||
{
|
{
|
||||||
//std::cerr << "skipElement()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
ElementType type = getType();
|
ElementType type = getType();
|
||||||
|
|
||||||
switch (type)
|
switch (type)
|
||||||
@ -475,9 +464,8 @@ JSON::Pos JSON::searchField(const char * data, size_t size) const
|
|||||||
{
|
{
|
||||||
if (!it->hasEscapes())
|
if (!it->hasEscapes())
|
||||||
{
|
{
|
||||||
if (static_cast<int>(size) + 2 > it->dataEnd() - it->data())
|
const auto current_name = it->getRawName();
|
||||||
continue;
|
if (current_name.size() == size && 0 == memcmp(current_name.data(), data, size))
|
||||||
if (!strncmp(data, it->data() + 1, size))
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -633,7 +621,7 @@ std::string JSON::getString() const
|
|||||||
{
|
{
|
||||||
throw JSONException("JSON: incorrect syntax: incorrect HEX code.");
|
throw JSONException("JSON: incorrect syntax: incorrect HEX code.");
|
||||||
}
|
}
|
||||||
buf.resize(buf.size() + 6); /// максимальный размер UTF8 многобайтовой последовательности
|
buf.resize(buf.size() + 6); /// Max size of UTF-8 sequence, including pre-standard mapping of UCS-4 to UTF-8.
|
||||||
int res = utf8.convert(unicode,
|
int res = utf8.convert(unicode,
|
||||||
reinterpret_cast<unsigned char *>(const_cast<char*>(buf.data())) + buf.size() - 6, 6);
|
reinterpret_cast<unsigned char *>(const_cast<char*>(buf.data())) + buf.size() - 6, 6);
|
||||||
if (!res)
|
if (!res)
|
||||||
@ -662,18 +650,18 @@ std::string JSON::getName() const
|
|||||||
return getString();
|
return getString();
|
||||||
}
|
}
|
||||||
|
|
||||||
StringRef JSON::getRawString() const
|
std::string_view JSON::getRawString() const
|
||||||
{
|
{
|
||||||
Pos s = ptr_begin;
|
Pos s = ptr_begin;
|
||||||
if (*s != '"')
|
if (*s != '"')
|
||||||
throw JSONException(std::string("JSON: expected \", got ") + *s);
|
throw JSONException(std::string("JSON: expected \", got ") + *s);
|
||||||
while (++s != ptr_end && *s != '"');
|
while (++s != ptr_end && *s != '"');
|
||||||
if (s != ptr_end)
|
if (s != ptr_end)
|
||||||
return StringRef(ptr_begin + 1, s - ptr_begin - 1);
|
return std::string_view(ptr_begin + 1, s - ptr_begin - 1);
|
||||||
throw JSONException("JSON: incorrect syntax (expected end of string, found end of JSON).");
|
throw JSONException("JSON: incorrect syntax (expected end of string, found end of JSON).");
|
||||||
}
|
}
|
||||||
|
|
||||||
StringRef JSON::getRawName() const
|
std::string_view JSON::getRawName() const
|
||||||
{
|
{
|
||||||
return getRawString();
|
return getRawString();
|
||||||
}
|
}
|
||||||
@ -747,8 +735,6 @@ JSON::iterator JSON::iterator::begin() const
|
|||||||
if (type != TYPE_ARRAY && type != TYPE_OBJECT)
|
if (type != TYPE_ARRAY && type != TYPE_OBJECT)
|
||||||
throw JSONException("JSON: not array or object when calling begin() method.");
|
throw JSONException("JSON: not array or object when calling begin() method.");
|
||||||
|
|
||||||
//std::cerr << "begin()\t" << data() << std::endl;
|
|
||||||
|
|
||||||
Pos pos = ptr_begin + 1;
|
Pos pos = ptr_begin + 1;
|
||||||
checkPos(pos);
|
checkPos(pos);
|
||||||
if (*pos == '}' || *pos == ']')
|
if (*pos == '}' || *pos == ']')
|
||||||
@ -839,4 +825,3 @@ bool JSON::isType<bool>() const
|
|||||||
{
|
{
|
||||||
return isBool();
|
return isBool();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -38,8 +38,16 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
||||||
|
// NOLINTBEGIN(google-explicit-constructor)
|
||||||
|
#ifdef __clang__
|
||||||
|
# pragma clang diagnostic push
|
||||||
|
# pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||||
|
#endif
|
||||||
POCO_DECLARE_EXCEPTION(Foundation_API, JSONException, Poco::Exception)
|
POCO_DECLARE_EXCEPTION(Foundation_API, JSONException, Poco::Exception)
|
||||||
|
#ifdef __clang__
|
||||||
|
# pragma clang diagnostic pop
|
||||||
|
#endif
|
||||||
|
// NOLINTEND(google-explicit-constructor)
|
||||||
|
|
||||||
class JSON
|
class JSON
|
||||||
{
|
{
|
||||||
@ -55,7 +63,7 @@ public:
|
|||||||
checkInit();
|
checkInit();
|
||||||
}
|
}
|
||||||
|
|
||||||
JSON(const std::string & s) : ptr_begin(s.data()), ptr_end(s.data() + s.size()), level(0)
|
explicit JSON(std::string_view s) : ptr_begin(s.data()), ptr_end(s.data() + s.size()), level(0)
|
||||||
{
|
{
|
||||||
checkInit();
|
checkInit();
|
||||||
}
|
}
|
||||||
@ -65,13 +73,7 @@ public:
|
|||||||
*this = rhs;
|
*this = rhs;
|
||||||
}
|
}
|
||||||
|
|
||||||
JSON & operator=(const JSON & rhs)
|
JSON & operator=(const JSON & rhs) = default;
|
||||||
{
|
|
||||||
ptr_begin = rhs.ptr_begin;
|
|
||||||
ptr_end = rhs.ptr_end;
|
|
||||||
level = rhs.level;
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
|
|
||||||
const char * data() const { return ptr_begin; }
|
const char * data() const { return ptr_begin; }
|
||||||
const char * dataEnd() const { return ptr_end; }
|
const char * dataEnd() const { return ptr_end; }
|
||||||
@ -130,8 +132,8 @@ public:
|
|||||||
std::string getName() const; /// Получить имя name-value пары.
|
std::string getName() const; /// Получить имя name-value пары.
|
||||||
JSON getValue() const; /// Получить значение name-value пары.
|
JSON getValue() const; /// Получить значение name-value пары.
|
||||||
|
|
||||||
StringRef getRawString() const;
|
std::string_view getRawString() const;
|
||||||
StringRef getRawName() const;
|
std::string_view getRawName() const;
|
||||||
|
|
||||||
/// Получить значение элемента; если элемент - строка, то распарсить значение из строки; если не строка или число - то исключение.
|
/// Получить значение элемента; если элемент - строка, то распарсить значение из строки; если не строка или число - то исключение.
|
||||||
double toDouble() const;
|
double toDouble() const;
|
||||||
@ -163,7 +165,7 @@ public:
|
|||||||
|
|
||||||
/// Перейти к следующему элементу массива или следующей name-value паре объекта.
|
/// Перейти к следующему элементу массива или следующей name-value паре объекта.
|
||||||
iterator & operator++();
|
iterator & operator++();
|
||||||
iterator operator++(int);
|
iterator operator++(int); // NOLINT(cert-dcl21-cpp)
|
||||||
|
|
||||||
/// Есть ли в строке escape-последовательности
|
/// Есть ли в строке escape-последовательности
|
||||||
bool hasEscapes() const;
|
bool hasEscapes() const;
|
||||||
|
@ -1,69 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include <mutex>
|
|
||||||
#include <atomic>
|
|
||||||
#include <vector>
|
|
||||||
#include <optional>
|
|
||||||
#include <replxx.hxx>
|
|
||||||
|
|
||||||
#include <base/types.h>
|
|
||||||
|
|
||||||
class LineReader
|
|
||||||
{
|
|
||||||
public:
|
|
||||||
struct Suggest
|
|
||||||
{
|
|
||||||
using Words = std::vector<std::string>;
|
|
||||||
|
|
||||||
/// Get vector for the matched range of words if any.
|
|
||||||
replxx::Replxx::completions_t getCompletions(const String & prefix, size_t prefix_length);
|
|
||||||
void addWords(Words && new_words);
|
|
||||||
|
|
||||||
private:
|
|
||||||
Words words;
|
|
||||||
Words words_no_case;
|
|
||||||
|
|
||||||
std::mutex mutex;
|
|
||||||
};
|
|
||||||
|
|
||||||
using Patterns = std::vector<const char *>;
|
|
||||||
|
|
||||||
LineReader(const String & history_file_path, bool multiline, Patterns extenders, Patterns delimiters);
|
|
||||||
virtual ~LineReader() {}
|
|
||||||
|
|
||||||
/// Reads the whole line until delimiter (in multiline mode) or until the last line without extender.
|
|
||||||
/// If resulting line is empty, it means the user interrupted the input.
|
|
||||||
/// Non-empty line is appended to history - without duplication.
|
|
||||||
/// Typical delimiter is ';' (semicolon) and typical extender is '\' (backslash).
|
|
||||||
String readLine(const String & first_prompt, const String & second_prompt);
|
|
||||||
|
|
||||||
/// When bracketed paste mode is set, pasted text is bracketed with control sequences so
|
|
||||||
/// that the program can differentiate pasted text from typed-in text. This helps
|
|
||||||
/// clickhouse-client so that without -m flag, one can still paste multiline queries, and
|
|
||||||
/// possibly get better pasting performance. See https://cirw.in/blog/bracketed-paste for
|
|
||||||
/// more details.
|
|
||||||
virtual void enableBracketedPaste() {}
|
|
||||||
|
|
||||||
protected:
|
|
||||||
enum InputStatus
|
|
||||||
{
|
|
||||||
ABORT = 0,
|
|
||||||
RESET_LINE,
|
|
||||||
INPUT_LINE,
|
|
||||||
};
|
|
||||||
|
|
||||||
const String history_file_path;
|
|
||||||
static constexpr char word_break_characters[] = " \t\v\f\a\b\r\n`~!@#$%^&*()-=+[{]}\\|;:'\",<.>/?";
|
|
||||||
|
|
||||||
String input;
|
|
||||||
|
|
||||||
bool multiline;
|
|
||||||
|
|
||||||
Patterns extenders;
|
|
||||||
Patterns delimiters;
|
|
||||||
|
|
||||||
String prev_line;
|
|
||||||
|
|
||||||
virtual InputStatus readOneLine(const String & prompt);
|
|
||||||
virtual void addToHistory(const String &) {}
|
|
||||||
};
|
|
@ -1,381 +0,0 @@
|
|||||||
#include <base/ReplxxLineReader.h>
|
|
||||||
#include <base/errnoToString.h>
|
|
||||||
|
|
||||||
#include <chrono>
|
|
||||||
#include <cerrno>
|
|
||||||
#include <cstring>
|
|
||||||
#include <unistd.h>
|
|
||||||
#include <functional>
|
|
||||||
#include <sys/file.h>
|
|
||||||
#include <sys/types.h>
|
|
||||||
#include <sys/wait.h>
|
|
||||||
#include <csignal>
|
|
||||||
#include <dlfcn.h>
|
|
||||||
#include <fcntl.h>
|
|
||||||
#include <fstream>
|
|
||||||
#include <fmt/format.h>
|
|
||||||
|
|
||||||
|
|
||||||
namespace
|
|
||||||
{
|
|
||||||
|
|
||||||
/// Trim ending whitespace inplace
|
|
||||||
void trim(String & s)
|
|
||||||
{
|
|
||||||
s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), s.end());
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string getEditor()
|
|
||||||
{
|
|
||||||
const char * editor = std::getenv("EDITOR");
|
|
||||||
|
|
||||||
if (!editor || !*editor)
|
|
||||||
editor = "vim";
|
|
||||||
|
|
||||||
return editor;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Copied from replxx::src/util.cxx::now_ms_str() under the terms of 3-clause BSD license of Replxx.
|
|
||||||
/// Copyright (c) 2017-2018, Marcin Konarski (amok at codestation.org)
|
|
||||||
/// Copyright (c) 2010, Salvatore Sanfilippo (antirez at gmail dot com)
|
|
||||||
/// Copyright (c) 2010, Pieter Noordhuis (pcnoordhuis at gmail dot com)
|
|
||||||
std::string replxx_now_ms_str()
|
|
||||||
{
|
|
||||||
std::chrono::milliseconds ms(std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()));
|
|
||||||
time_t t = ms.count() / 1000;
|
|
||||||
tm broken;
|
|
||||||
if (!localtime_r(&t, &broken))
|
|
||||||
{
|
|
||||||
return std::string();
|
|
||||||
}
|
|
||||||
|
|
||||||
static int const BUFF_SIZE(32);
|
|
||||||
char str[BUFF_SIZE];
|
|
||||||
strftime(str, BUFF_SIZE, "%Y-%m-%d %H:%M:%S.", &broken);
|
|
||||||
snprintf(str + sizeof("YYYY-mm-dd HH:MM:SS"), 5, "%03d", static_cast<int>(ms.count() % 1000));
|
|
||||||
return str;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert from readline to replxx format.
|
|
||||||
///
|
|
||||||
/// replxx requires each history line to prepended with time line:
|
|
||||||
///
|
|
||||||
/// ### YYYY-MM-DD HH:MM:SS.SSS
|
|
||||||
/// select 1
|
|
||||||
///
|
|
||||||
/// And w/o those service lines it will load all lines from history file as
|
|
||||||
/// one history line for suggestion. And if there are lots of lines in file it
|
|
||||||
/// will take lots of time (getline() + tons of reallocations).
|
|
||||||
///
|
|
||||||
/// NOTE: this code uses std::ifstream/std::ofstream like original replxx code.
|
|
||||||
void convertHistoryFile(const std::string & path, replxx::Replxx & rx)
|
|
||||||
{
|
|
||||||
std::ifstream in(path);
|
|
||||||
if (!in)
|
|
||||||
{
|
|
||||||
rx.print("Cannot open %s reading (for conversion): %s\n",
|
|
||||||
path.c_str(), errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string line;
|
|
||||||
if (getline(in, line).bad())
|
|
||||||
{
|
|
||||||
rx.print("Cannot read from %s (for conversion): %s\n",
|
|
||||||
path.c_str(), errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This is the marker of the date, no need to convert.
|
|
||||||
static char const REPLXX_TIMESTAMP_PATTERN[] = "### dddd-dd-dd dd:dd:dd.ddd";
|
|
||||||
if (line.empty() || (line.starts_with("### ") && line.size() == strlen(REPLXX_TIMESTAMP_PATTERN)))
|
|
||||||
{
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::vector<std::string> lines;
|
|
||||||
in.seekg(0);
|
|
||||||
while (getline(in, line).good())
|
|
||||||
{
|
|
||||||
lines.push_back(line);
|
|
||||||
}
|
|
||||||
in.close();
|
|
||||||
|
|
||||||
size_t lines_size = lines.size();
|
|
||||||
std::sort(lines.begin(), lines.end());
|
|
||||||
lines.erase(std::unique(lines.begin(), lines.end()), lines.end());
|
|
||||||
rx.print("The history file (%s) is in old format. %zu lines, %zu unique lines.\n",
|
|
||||||
path.c_str(), lines_size, lines.size());
|
|
||||||
|
|
||||||
std::ofstream out(path);
|
|
||||||
if (!out)
|
|
||||||
{
|
|
||||||
rx.print("Cannot open %s for writing (for conversion): %s\n",
|
|
||||||
path.c_str(), errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const std::string & timestamp = replxx_now_ms_str();
|
|
||||||
for (const auto & out_line : lines)
|
|
||||||
{
|
|
||||||
out << "### " << timestamp << "\n" << out_line << std::endl;
|
|
||||||
}
|
|
||||||
out.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
static bool replxx_last_is_delimiter = false;
|
|
||||||
void ReplxxLineReader::setLastIsDelimiter(bool flag)
|
|
||||||
{
|
|
||||||
replxx_last_is_delimiter = flag;
|
|
||||||
}
|
|
||||||
|
|
||||||
ReplxxLineReader::ReplxxLineReader(
|
|
||||||
Suggest & suggest,
|
|
||||||
const String & history_file_path_,
|
|
||||||
bool multiline_,
|
|
||||||
Patterns extenders_,
|
|
||||||
Patterns delimiters_,
|
|
||||||
replxx::Replxx::highlighter_callback_t highlighter_)
|
|
||||||
: LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
|
|
||||||
, editor(getEditor())
|
|
||||||
{
|
|
||||||
using namespace std::placeholders;
|
|
||||||
using Replxx = replxx::Replxx;
|
|
||||||
|
|
||||||
if (!history_file_path.empty())
|
|
||||||
{
|
|
||||||
history_file_fd = open(history_file_path.c_str(), O_RDWR);
|
|
||||||
if (history_file_fd < 0)
|
|
||||||
{
|
|
||||||
rx.print("Open of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
convertHistoryFile(history_file_path, rx);
|
|
||||||
|
|
||||||
if (flock(history_file_fd, LOCK_SH))
|
|
||||||
{
|
|
||||||
rx.print("Shared lock of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
if (!rx.history_load(history_file_path))
|
|
||||||
{
|
|
||||||
rx.print("Loading history failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (flock(history_file_fd, LOCK_UN))
|
|
||||||
{
|
|
||||||
rx.print("Unlock of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
rx.install_window_change_handler();
|
|
||||||
|
|
||||||
auto callback = [&suggest] (const String & context, size_t context_size)
|
|
||||||
{
|
|
||||||
return suggest.getCompletions(context, context_size);
|
|
||||||
};
|
|
||||||
|
|
||||||
rx.set_completion_callback(callback);
|
|
||||||
rx.set_complete_on_empty(false);
|
|
||||||
rx.set_word_break_characters(word_break_characters);
|
|
||||||
rx.set_ignore_case(true);
|
|
||||||
|
|
||||||
if (highlighter)
|
|
||||||
rx.set_highlighter_callback(highlighter);
|
|
||||||
|
|
||||||
/// By default C-p/C-n binded to COMPLETE_NEXT/COMPLETE_PREV,
|
|
||||||
/// bind C-p/C-n to history-previous/history-next like readline.
|
|
||||||
rx.bind_key(Replxx::KEY::control('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_NEXT, code); });
|
|
||||||
rx.bind_key(Replxx::KEY::control('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_PREVIOUS, code); });
|
|
||||||
|
|
||||||
auto commit_action = [this](char32_t code)
|
|
||||||
{
|
|
||||||
/// If we allow multiline and there is already something in the input, start a newline.
|
|
||||||
/// NOTE: Lexer is only available if we use highlighter.
|
|
||||||
if (highlighter && multiline && !replxx_last_is_delimiter)
|
|
||||||
return rx.invoke(Replxx::ACTION::NEW_LINE, code);
|
|
||||||
replxx_last_is_delimiter = false;
|
|
||||||
return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);
|
|
||||||
};
|
|
||||||
/// bind C-j to ENTER action.
|
|
||||||
rx.bind_key(Replxx::KEY::control('J'), commit_action);
|
|
||||||
rx.bind_key(Replxx::KEY::ENTER, commit_action);
|
|
||||||
|
|
||||||
/// By default COMPLETE_NEXT/COMPLETE_PREV was binded to C-p/C-n, re-bind
|
|
||||||
/// to M-P/M-N (that was used for HISTORY_COMMON_PREFIX_SEARCH before, but
|
|
||||||
/// it also binded to M-p/M-n).
|
|
||||||
rx.bind_key(Replxx::KEY::meta('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMPLETE_NEXT, code); });
|
|
||||||
rx.bind_key(Replxx::KEY::meta('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMPLETE_PREVIOUS, code); });
|
|
||||||
/// By default M-BACKSPACE is KILL_TO_WHITESPACE_ON_LEFT, while in readline it is backward-kill-word
|
|
||||||
rx.bind_key(Replxx::KEY::meta(Replxx::KEY::BACKSPACE), [this](char32_t code) { return rx.invoke(Replxx::ACTION::KILL_TO_BEGINING_OF_WORD, code); });
|
|
||||||
/// By default C-w is KILL_TO_BEGINING_OF_WORD, while in readline it is unix-word-rubout
|
|
||||||
rx.bind_key(Replxx::KEY::control('W'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::KILL_TO_WHITESPACE_ON_LEFT, code); });
|
|
||||||
|
|
||||||
rx.bind_key(Replxx::KEY::meta('E'), [this](char32_t) { openEditor(); return Replxx::ACTION_RESULT::CONTINUE; });
|
|
||||||
}
|
|
||||||
|
|
||||||
ReplxxLineReader::~ReplxxLineReader()
|
|
||||||
{
|
|
||||||
if (close(history_file_fd))
|
|
||||||
rx.print("Close of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
LineReader::InputStatus ReplxxLineReader::readOneLine(const String & prompt)
|
|
||||||
{
|
|
||||||
input.clear();
|
|
||||||
|
|
||||||
const char* cinput = rx.input(prompt);
|
|
||||||
if (cinput == nullptr)
|
|
||||||
return (errno != EAGAIN) ? ABORT : RESET_LINE;
|
|
||||||
input = cinput;
|
|
||||||
|
|
||||||
trim(input);
|
|
||||||
return INPUT_LINE;
|
|
||||||
}
|
|
||||||
|
|
||||||
void ReplxxLineReader::addToHistory(const String & line)
|
|
||||||
{
|
|
||||||
// locking history file to prevent from inconsistent concurrent changes
|
|
||||||
//
|
|
||||||
// replxx::Replxx::history_save() already has lockf(),
|
|
||||||
// but replxx::Replxx::history_load() does not
|
|
||||||
// and that is why flock() is added here.
|
|
||||||
bool locked = false;
|
|
||||||
if (flock(history_file_fd, LOCK_EX))
|
|
||||||
rx.print("Lock of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
else
|
|
||||||
locked = true;
|
|
||||||
|
|
||||||
rx.history_add(line);
|
|
||||||
|
|
||||||
// flush changes to the disk
|
|
||||||
if (!rx.history_save(history_file_path))
|
|
||||||
rx.print("Saving history failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
|
|
||||||
if (locked && 0 != flock(history_file_fd, LOCK_UN))
|
|
||||||
rx.print("Unlock of history file failed: %s\n", errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
/// See comments in ShellCommand::executeImpl()
|
|
||||||
/// (for the vfork via dlsym())
|
|
||||||
int ReplxxLineReader::executeEditor(const std::string & path)
|
|
||||||
{
|
|
||||||
std::vector<char> argv0(editor.data(), editor.data() + editor.size() + 1);
|
|
||||||
std::vector<char> argv1(path.data(), path.data() + path.size() + 1);
|
|
||||||
char * const argv[] = {argv0.data(), argv1.data(), nullptr};
|
|
||||||
|
|
||||||
static void * real_vfork = dlsym(RTLD_DEFAULT, "vfork");
|
|
||||||
if (!real_vfork)
|
|
||||||
{
|
|
||||||
rx.print("Cannot find symbol vfork in myself: %s\n", errnoToString(errno).c_str());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
pid_t pid = reinterpret_cast<pid_t (*)()>(real_vfork)();
|
|
||||||
|
|
||||||
if (-1 == pid)
|
|
||||||
{
|
|
||||||
rx.print("Cannot vfork: %s\n", errnoToString(errno).c_str());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Child
|
|
||||||
if (0 == pid)
|
|
||||||
{
|
|
||||||
sigset_t mask;
|
|
||||||
sigemptyset(&mask);
|
|
||||||
sigprocmask(0, nullptr, &mask);
|
|
||||||
sigprocmask(SIG_UNBLOCK, &mask, nullptr);
|
|
||||||
|
|
||||||
execvp(editor.c_str(), argv);
|
|
||||||
rx.print("Cannot execute %s: %s\n", editor.c_str(), errnoToString(errno).c_str());
|
|
||||||
_exit(-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
int status = 0;
|
|
||||||
do
|
|
||||||
{
|
|
||||||
int exited_pid = waitpid(pid, &status, 0);
|
|
||||||
if (exited_pid == -1)
|
|
||||||
{
|
|
||||||
if (errno == EINTR)
|
|
||||||
continue;
|
|
||||||
|
|
||||||
rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
break;
|
|
||||||
} while (true);
|
|
||||||
return status;
|
|
||||||
}
|
|
||||||
|
|
||||||
void ReplxxLineReader::openEditor()
|
|
||||||
{
|
|
||||||
char filename[] = "clickhouse_replxx_XXXXXX.sql";
|
|
||||||
int fd = ::mkstemps(filename, 4);
|
|
||||||
if (-1 == fd)
|
|
||||||
{
|
|
||||||
rx.print("Cannot create temporary file to edit query: %s\n", errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
replxx::Replxx::State state(rx.get_state());
|
|
||||||
|
|
||||||
size_t bytes_written = 0;
|
|
||||||
const char * begin = state.text();
|
|
||||||
size_t offset = strlen(state.text());
|
|
||||||
while (bytes_written != offset)
|
|
||||||
{
|
|
||||||
ssize_t res = ::write(fd, begin + bytes_written, offset - bytes_written);
|
|
||||||
if ((-1 == res || 0 == res) && errno != EINTR)
|
|
||||||
{
|
|
||||||
rx.print("Cannot write to temporary query file %s: %s\n", filename, errnoToString(errno).c_str());
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
bytes_written += res;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (0 != ::close(fd))
|
|
||||||
{
|
|
||||||
rx.print("Cannot close temporary query file %s: %s\n", filename, errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (0 == executeEditor(filename))
|
|
||||||
{
|
|
||||||
try
|
|
||||||
{
|
|
||||||
std::ifstream t(filename);
|
|
||||||
std::string str;
|
|
||||||
t.seekg(0, std::ios::end);
|
|
||||||
str.reserve(t.tellg());
|
|
||||||
t.seekg(0, std::ios::beg);
|
|
||||||
str.assign((std::istreambuf_iterator<char>(t)), std::istreambuf_iterator<char>());
|
|
||||||
rx.set_state(replxx::Replxx::State(str.c_str(), str.size()));
|
|
||||||
}
|
|
||||||
catch (...)
|
|
||||||
{
|
|
||||||
rx.print("Cannot read from temporary query file %s: %s\n", filename, errnoToString(errno).c_str());
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (bracketed_paste_enabled)
|
|
||||||
enableBracketedPaste();
|
|
||||||
|
|
||||||
if (0 != ::unlink(filename))
|
|
||||||
rx.print("Cannot remove temporary query file %s: %s\n", filename, errnoToString(errno).c_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
void ReplxxLineReader::enableBracketedPaste()
|
|
||||||
{
|
|
||||||
bracketed_paste_enabled = true;
|
|
||||||
rx.enable_bracketed_paste();
|
|
||||||
};
|
|
@ -3,12 +3,15 @@
|
|||||||
#include <cassert>
|
#include <cassert>
|
||||||
#include <stdexcept> // for std::logic_error
|
#include <stdexcept> // for std::logic_error
|
||||||
#include <string>
|
#include <string>
|
||||||
|
#include <type_traits>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
#include <functional>
|
#include <functional>
|
||||||
#include <iosfwd>
|
#include <iosfwd>
|
||||||
|
|
||||||
|
#include <base/defines.h>
|
||||||
#include <base/types.h>
|
#include <base/types.h>
|
||||||
#include <base/unaligned.h>
|
#include <base/unaligned.h>
|
||||||
|
#include <base/simd.h>
|
||||||
|
|
||||||
#include <city.h>
|
#include <city.h>
|
||||||
|
|
||||||
@ -27,6 +30,15 @@
|
|||||||
#define CRC_INT __crc32cd
|
#define CRC_INT __crc32cd
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
#if defined(__aarch64__) && defined(__ARM_NEON)
|
||||||
|
#include <arm_neon.h>
|
||||||
|
#pragma clang diagnostic ignored "-Wreserved-identifier"
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if defined(__s390x__)
|
||||||
|
#include <base/crc32c_s390x.h>
|
||||||
|
#define CRC_INT s390x_crc32c
|
||||||
|
#endif
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The std::string_view-like container to avoid creating strings to find substrings in the hash table.
|
* The std::string_view-like container to avoid creating strings to find substrings in the hash table.
|
||||||
@ -37,7 +49,8 @@ struct StringRef
|
|||||||
size_t size = 0;
|
size_t size = 0;
|
||||||
|
|
||||||
/// Non-constexpr due to reinterpret_cast.
|
/// Non-constexpr due to reinterpret_cast.
|
||||||
template <typename CharT, typename = std::enable_if_t<sizeof(CharT) == 1>>
|
template <typename CharT>
|
||||||
|
requires (sizeof(CharT) == 1)
|
||||||
StringRef(const CharT * data_, size_t size_) : data(reinterpret_cast<const char *>(data_)), size(size_)
|
StringRef(const CharT * data_, size_t size_) : data(reinterpret_cast<const char *>(data_)), size(size_)
|
||||||
{
|
{
|
||||||
/// Sanity check for overflowed values.
|
/// Sanity check for overflowed values.
|
||||||
@ -51,19 +64,15 @@ struct StringRef
|
|||||||
constexpr StringRef(const char * data_) : StringRef(std::string_view{data_}) {} /// NOLINT
|
constexpr StringRef(const char * data_) : StringRef(std::string_view{data_}) {} /// NOLINT
|
||||||
constexpr StringRef() = default;
|
constexpr StringRef() = default;
|
||||||
|
|
||||||
|
bool empty() const { return size == 0; }
|
||||||
|
|
||||||
std::string toString() const { return std::string(data, size); }
|
std::string toString() const { return std::string(data, size); }
|
||||||
|
|
||||||
explicit operator std::string() const { return toString(); }
|
explicit operator std::string() const { return toString(); }
|
||||||
std::string_view toView() const { return std::string_view(data, size); }
|
|
||||||
|
|
||||||
|
std::string_view toView() const { return std::string_view(data, size); }
|
||||||
constexpr explicit operator std::string_view() const { return std::string_view(data, size); }
|
constexpr explicit operator std::string_view() const { return std::string_view(data, size); }
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Here constexpr doesn't implicate inline, see https://www.viva64.com/en/w/v1043/
|
|
||||||
/// nullptr can't be used because the StringRef values are used in SipHash's pointer arithmetic
|
|
||||||
/// and the UBSan thinks that something like nullptr + 8 is UB.
|
|
||||||
constexpr const inline char empty_string_ref_addr{};
|
|
||||||
constexpr const inline StringRef EMPTY_STRING_REF{&empty_string_ref_addr, 0};
|
|
||||||
|
|
||||||
using StringRefs = std::vector<StringRef>;
|
using StringRefs = std::vector<StringRef>;
|
||||||
|
|
||||||
@ -75,14 +84,14 @@ using StringRefs = std::vector<StringRef>;
|
|||||||
* For more information, see hash_map_string_2.cpp
|
* For more information, see hash_map_string_2.cpp
|
||||||
*/
|
*/
|
||||||
|
|
||||||
inline bool compareSSE2(const char * p1, const char * p2)
|
inline bool compare8(const char * p1, const char * p2)
|
||||||
{
|
{
|
||||||
return 0xFFFF == _mm_movemask_epi8(_mm_cmpeq_epi8(
|
return 0xFFFF == _mm_movemask_epi8(_mm_cmpeq_epi8(
|
||||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p1)),
|
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p1)),
|
||||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p2))));
|
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p2))));
|
||||||
}
|
}
|
||||||
|
|
||||||
inline bool compareSSE2x4(const char * p1, const char * p2)
|
inline bool compare64(const char * p1, const char * p2)
|
||||||
{
|
{
|
||||||
return 0xFFFF == _mm_movemask_epi8(
|
return 0xFFFF == _mm_movemask_epi8(
|
||||||
_mm_and_si128(
|
_mm_and_si128(
|
||||||
@ -102,7 +111,30 @@ inline bool compareSSE2x4(const char * p1, const char * p2)
|
|||||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p2) + 3)))));
|
_mm_loadu_si128(reinterpret_cast<const __m128i *>(p2) + 3)))));
|
||||||
}
|
}
|
||||||
|
|
||||||
inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)
|
#elif defined(__aarch64__) && defined(__ARM_NEON)
|
||||||
|
|
||||||
|
inline bool compare8(const char * p1, const char * p2)
|
||||||
|
{
|
||||||
|
uint64_t mask = getNibbleMask(vceqq_u8(
|
||||||
|
vld1q_u8(reinterpret_cast<const unsigned char *>(p1)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2))));
|
||||||
|
return 0xFFFFFFFFFFFFFFFF == mask;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool compare64(const char * p1, const char * p2)
|
||||||
|
{
|
||||||
|
uint64_t mask = getNibbleMask(vandq_u8(
|
||||||
|
vandq_u8(vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2))),
|
||||||
|
vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 16)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 16)))),
|
||||||
|
vandq_u8(vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 32)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 32))),
|
||||||
|
vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 48)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 48))))));
|
||||||
|
return 0xFFFFFFFFFFFFFFFF == mask;
|
||||||
|
}
|
||||||
|
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if defined(__SSE2__) || (defined(__aarch64__) && defined(__ARM_NEON))
|
||||||
|
|
||||||
|
inline bool memequalWide(const char * p1, const char * p2, size_t size)
|
||||||
{
|
{
|
||||||
/** The order of branches and the trick with overlapping comparisons
|
/** The order of branches and the trick with overlapping comparisons
|
||||||
* are the same as in memcpy implementation.
|
* are the same as in memcpy implementation.
|
||||||
@ -139,7 +171,7 @@ inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)
|
|||||||
|
|
||||||
while (size >= 64)
|
while (size >= 64)
|
||||||
{
|
{
|
||||||
if (compareSSE2x4(p1, p2))
|
if (compare64(p1, p2))
|
||||||
{
|
{
|
||||||
p1 += 64;
|
p1 += 64;
|
||||||
p2 += 64;
|
p2 += 64;
|
||||||
@ -149,19 +181,18 @@ inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
switch (size / 16)
|
switch (size / 16) // NOLINT(bugprone-switch-missing-default-case)
|
||||||
{
|
{
|
||||||
case 3: if (!compareSSE2(p1 + 32, p2 + 32)) return false; [[fallthrough]];
|
case 3: if (!compare8(p1 + 32, p2 + 32)) return false; [[fallthrough]];
|
||||||
case 2: if (!compareSSE2(p1 + 16, p2 + 16)) return false; [[fallthrough]];
|
case 2: if (!compare8(p1 + 16, p2 + 16)) return false; [[fallthrough]];
|
||||||
case 1: if (!compareSSE2(p1, p2)) return false;
|
case 1: if (!compare8(p1, p2)) return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
return compareSSE2(p1 + size - 16, p2 + size - 16);
|
return compare8(p1 + size - 16, p2 + size - 16);
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
||||||
inline bool operator== (StringRef lhs, StringRef rhs)
|
inline bool operator== (StringRef lhs, StringRef rhs)
|
||||||
{
|
{
|
||||||
if (lhs.size != rhs.size)
|
if (lhs.size != rhs.size)
|
||||||
@ -170,8 +201,8 @@ inline bool operator== (StringRef lhs, StringRef rhs)
|
|||||||
if (lhs.size == 0)
|
if (lhs.size == 0)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
#if defined(__SSE2__)
|
#if defined(__SSE2__) || (defined(__aarch64__) && defined(__ARM_NEON))
|
||||||
return memequalSSE2Wide(lhs.data, rhs.data, lhs.size);
|
return memequalWide(lhs.data, rhs.data, lhs.size);
|
||||||
#else
|
#else
|
||||||
return 0 == memcmp(lhs.data, rhs.data, lhs.size);
|
return 0 == memcmp(lhs.data, rhs.data, lhs.size);
|
||||||
#endif
|
#endif
|
||||||
@ -225,7 +256,7 @@ inline UInt64 shiftMix(UInt64 val)
|
|||||||
return val ^ (val >> 47);
|
return val ^ (val >> 47);
|
||||||
}
|
}
|
||||||
|
|
||||||
inline UInt64 rotateByAtLeast1(UInt64 val, int shift)
|
inline UInt64 rotateByAtLeast1(UInt64 val, UInt8 shift)
|
||||||
{
|
{
|
||||||
return (val >> shift) | (val << (64 - shift));
|
return (val >> shift) | (val << (64 - shift));
|
||||||
}
|
}
|
||||||
@ -237,8 +268,8 @@ inline size_t hashLessThan8(const char * data, size_t size)
|
|||||||
|
|
||||||
if (size >= 4)
|
if (size >= 4)
|
||||||
{
|
{
|
||||||
UInt64 a = unalignedLoad<uint32_t>(data);
|
UInt64 a = unalignedLoadLittleEndian<uint32_t>(data);
|
||||||
return hashLen16(size + (a << 3), unalignedLoad<uint32_t>(data + size - 4));
|
return hashLen16(size + (a << 3), unalignedLoadLittleEndian<uint32_t>(data + size - 4));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (size > 0)
|
if (size > 0)
|
||||||
@ -247,7 +278,7 @@ inline size_t hashLessThan8(const char * data, size_t size)
|
|||||||
uint8_t b = data[size >> 1];
|
uint8_t b = data[size >> 1];
|
||||||
uint8_t c = data[size - 1];
|
uint8_t c = data[size - 1];
|
||||||
uint32_t y = static_cast<uint32_t>(a) + (static_cast<uint32_t>(b) << 8);
|
uint32_t y = static_cast<uint32_t>(a) + (static_cast<uint32_t>(b) << 8);
|
||||||
uint32_t z = size + (static_cast<uint32_t>(c) << 2);
|
uint32_t z = static_cast<uint32_t>(size) + (static_cast<uint32_t>(c) << 2);
|
||||||
return shiftMix(y * k2 ^ z * k3) * k2;
|
return shiftMix(y * k2 ^ z * k3) * k2;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -258,9 +289,9 @@ inline size_t hashLessThan16(const char * data, size_t size)
|
|||||||
{
|
{
|
||||||
if (size > 8)
|
if (size > 8)
|
||||||
{
|
{
|
||||||
UInt64 a = unalignedLoad<UInt64>(data);
|
UInt64 a = unalignedLoadLittleEndian<UInt64>(data);
|
||||||
UInt64 b = unalignedLoad<UInt64>(data + size - 8);
|
UInt64 b = unalignedLoadLittleEndian<UInt64>(data + size - 8);
|
||||||
return hashLen16(a, rotateByAtLeast1(b + size, size)) ^ b;
|
return hashLen16(a, rotateByAtLeast1(b + size, static_cast<UInt8>(size))) ^ b;
|
||||||
}
|
}
|
||||||
|
|
||||||
return hashLessThan8(data, size);
|
return hashLessThan8(data, size);
|
||||||
@ -268,7 +299,7 @@ inline size_t hashLessThan16(const char * data, size_t size)
|
|||||||
|
|
||||||
struct CRC32Hash
|
struct CRC32Hash
|
||||||
{
|
{
|
||||||
size_t operator() (StringRef x) const
|
unsigned operator() (StringRef x) const
|
||||||
{
|
{
|
||||||
const char * pos = x.data;
|
const char * pos = x.data;
|
||||||
size_t size = x.size;
|
size_t size = x.size;
|
||||||
@ -276,24 +307,26 @@ struct CRC32Hash
|
|||||||
if (size == 0)
|
if (size == 0)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
|
chassert(pos);
|
||||||
|
|
||||||
if (size < 8)
|
if (size < 8)
|
||||||
{
|
{
|
||||||
return hashLessThan8(x.data, x.size);
|
return static_cast<unsigned>(hashLessThan8(x.data, x.size));
|
||||||
}
|
}
|
||||||
|
|
||||||
const char * end = pos + size;
|
const char * end = pos + size;
|
||||||
size_t res = -1ULL;
|
unsigned res = -1U;
|
||||||
|
|
||||||
do
|
do
|
||||||
{
|
{
|
||||||
UInt64 word = unalignedLoad<UInt64>(pos);
|
UInt64 word = unalignedLoadLittleEndian<UInt64>(pos);
|
||||||
res = CRC_INT(res, word);
|
res = static_cast<unsigned>(CRC_INT(res, word));
|
||||||
|
|
||||||
pos += 8;
|
pos += 8;
|
||||||
} while (pos + 8 < end);
|
} while (pos + 8 < end);
|
||||||
|
|
||||||
UInt64 word = unalignedLoad<UInt64>(end - 8); /// I'm not sure if this is normal.
|
UInt64 word = unalignedLoadLittleEndian<UInt64>(end - 8); /// I'm not sure if this is normal.
|
||||||
res = CRC_INT(res, word);
|
res = static_cast<unsigned>(CRC_INT(res, word));
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
@ -305,7 +338,7 @@ struct StringRefHash : CRC32Hash {};
|
|||||||
|
|
||||||
struct CRC32Hash
|
struct CRC32Hash
|
||||||
{
|
{
|
||||||
size_t operator() (StringRef /* x */) const
|
unsigned operator() (StringRef /* x */) const
|
||||||
{
|
{
|
||||||
throw std::logic_error{"Not implemented CRC32Hash without SSE"};
|
throw std::logic_error{"Not implemented CRC32Hash without SSE"};
|
||||||
}
|
}
|
||||||
@ -329,5 +362,16 @@ namespace ZeroTraits
|
|||||||
inline void set(StringRef & x) { x.size = 0; }
|
inline void set(StringRef & x) { x.size = 0; }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
namespace PackedZeroTraits
|
||||||
|
{
|
||||||
|
template <typename Second, template <typename, typename> class PackedPairNoInit>
|
||||||
|
inline bool check(const PackedPairNoInit<StringRef, Second> p)
|
||||||
|
{ return 0 == p.key.size; }
|
||||||
|
|
||||||
|
template <typename Second, template <typename, typename> class PackedPairNoInit>
|
||||||
|
inline void set(PackedPairNoInit<StringRef, Second> & p)
|
||||||
|
{ p.key.size = 0; }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
std::ostream & operator<<(std::ostream & os, const StringRef & str);
|
std::ostream & operator<<(std::ostream & os, const StringRef & str);
|
||||||
|
@ -4,7 +4,6 @@
|
|||||||
#include <type_traits>
|
#include <type_traits>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
#include "defines.h"
|
#include "defines.h"
|
||||||
#include "TypePair.h"
|
|
||||||
|
|
||||||
/// General-purpose typelist. Easy on compilation times as it does not use recursion.
|
/// General-purpose typelist. Easy on compilation times as it does not use recursion.
|
||||||
template <typename ...Args>
|
template <typename ...Args>
|
||||||
@ -28,7 +27,7 @@ namespace TypeListUtils /// In some contexts it's more handy to use functions in
|
|||||||
constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }
|
constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }
|
||||||
|
|
||||||
template <typename F, typename ...Args>
|
template <typename F, typename ...Args>
|
||||||
constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
|
constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(TypeList<Args>{}), ...); }
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename TypeListLeft, typename TypeListRight>
|
template <typename TypeListLeft, typename TypeListRight>
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
#include "Decimal.h"
|
#include "Decimal.h"
|
||||||
#include "UUID.h"
|
#include "UUID.h"
|
||||||
|
#include "IPv4andIPv6.h"
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -35,6 +36,8 @@ TN_MAP(Float32)
|
|||||||
TN_MAP(Float64)
|
TN_MAP(Float64)
|
||||||
TN_MAP(String)
|
TN_MAP(String)
|
||||||
TN_MAP(UUID)
|
TN_MAP(UUID)
|
||||||
|
TN_MAP(IPv4)
|
||||||
|
TN_MAP(IPv6)
|
||||||
TN_MAP(Decimal32)
|
TN_MAP(Decimal32)
|
||||||
TN_MAP(Decimal64)
|
TN_MAP(Decimal64)
|
||||||
TN_MAP(Decimal128)
|
TN_MAP(Decimal128)
|
||||||
|
@ -1,4 +0,0 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
template <typename T, typename V> struct TypePair {};
|
|
||||||
template <typename T> struct Id {};
|
|
@ -3,13 +3,29 @@
|
|||||||
#include <Poco/Util/LayeredConfiguration.h>
|
#include <Poco/Util/LayeredConfiguration.h>
|
||||||
#include <Poco/Util/MapConfiguration.h>
|
#include <Poco/Util/MapConfiguration.h>
|
||||||
|
|
||||||
|
void argsToConfig(const Poco::Util::Application::ArgVec & argv,
|
||||||
void argsToConfig(const Poco::Util::Application::ArgVec & argv, Poco::Util::LayeredConfiguration & config, int priority)
|
Poco::Util::LayeredConfiguration & config,
|
||||||
|
int priority,
|
||||||
|
const std::unordered_set<std::string>* alias_names)
|
||||||
{
|
{
|
||||||
/// Parsing all args and converting to config layer
|
/// Parsing all args and converting to config layer
|
||||||
/// Test: -- --1=1 --1=2 --3 5 7 8 -9 10 -11=12 14= 15== --16==17 --=18 --19= --20 21 22 --23 --24 25 --26 -27 28 ---29=30 -- ----31 32 --33 3-4
|
/// Test: -- --1=1 --1=2 --3 5 7 8 -9 10 -11=12 14= 15== --16==17 --=18 --19= --20 21 22 --23 --24 25 --26 -27 28 ---29=30 -- ----31 32 --33 3-4
|
||||||
Poco::AutoPtr<Poco::Util::MapConfiguration> map_config = new Poco::Util::MapConfiguration;
|
Poco::AutoPtr<Poco::Util::MapConfiguration> map_config = new Poco::Util::MapConfiguration;
|
||||||
std::string key;
|
std::string key;
|
||||||
|
|
||||||
|
auto add_arg = [&map_config, &alias_names](const std::string & k, const std::string & v)
|
||||||
|
{
|
||||||
|
map_config->setString(k, v);
|
||||||
|
|
||||||
|
if (alias_names && !alias_names->contains(k))
|
||||||
|
{
|
||||||
|
std::string alias_key = k;
|
||||||
|
std::replace(alias_key.begin(), alias_key.end(), '-', '_');
|
||||||
|
if (alias_names->contains(alias_key))
|
||||||
|
map_config->setString(alias_key, v);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
for (const auto & arg : argv)
|
for (const auto & arg : argv)
|
||||||
{
|
{
|
||||||
auto key_start = arg.find_first_not_of('-');
|
auto key_start = arg.find_first_not_of('-');
|
||||||
@ -19,7 +35,7 @@ void argsToConfig(const Poco::Util::Application::ArgVec & argv, Poco::Util::Laye
|
|||||||
// old saved '--key', will set to some true value "1"
|
// old saved '--key', will set to some true value "1"
|
||||||
if (!key.empty() && pos_minus != std::string::npos && pos_minus < key_start)
|
if (!key.empty() && pos_minus != std::string::npos && pos_minus < key_start)
|
||||||
{
|
{
|
||||||
map_config->setString(key, "1");
|
add_arg(key, "1");
|
||||||
key = "";
|
key = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -29,7 +45,7 @@ void argsToConfig(const Poco::Util::Application::ArgVec & argv, Poco::Util::Laye
|
|||||||
{
|
{
|
||||||
if (pos_minus == std::string::npos || pos_minus > key_start)
|
if (pos_minus == std::string::npos || pos_minus > key_start)
|
||||||
{
|
{
|
||||||
map_config->setString(key, arg);
|
add_arg(key, arg);
|
||||||
}
|
}
|
||||||
key = "";
|
key = "";
|
||||||
}
|
}
|
||||||
@ -55,7 +71,7 @@ void argsToConfig(const Poco::Util::Application::ArgVec & argv, Poco::Util::Laye
|
|||||||
if (arg.size() > pos_eq)
|
if (arg.size() > pos_eq)
|
||||||
value = arg.substr(pos_eq + 1);
|
value = arg.substr(pos_eq + 1);
|
||||||
|
|
||||||
map_config->setString(key, value);
|
add_arg(key, value);
|
||||||
key = "";
|
key = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,11 +1,16 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <Poco/Util/Application.h>
|
#include <Poco/Util/Application.h>
|
||||||
|
#include <string>
|
||||||
|
#include <unordered_set>
|
||||||
|
|
||||||
namespace Poco::Util
|
namespace Poco::Util
|
||||||
{
|
{
|
||||||
class LayeredConfiguration;
|
class LayeredConfiguration; // NOLINT(cppcoreguidelines-virtual-class-destructor)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Import extra command line arguments to configuration. These are command line arguments after --.
|
/// Import extra command line arguments to configuration. These are command line arguments after --.
|
||||||
void argsToConfig(const Poco::Util::Application::ArgVec & argv, Poco::Util::LayeredConfiguration & config, int priority);
|
void argsToConfig(const Poco::Util::Application::ArgVec & argv,
|
||||||
|
Poco::Util::LayeredConfiguration & config,
|
||||||
|
int priority,
|
||||||
|
const std::unordered_set<std::string>* registered_alias_names = nullptr);
|
||||||
|
@ -3,6 +3,7 @@
|
|||||||
#include <base/extended_types.h>
|
#include <base/extended_types.h>
|
||||||
#include <base/defines.h>
|
#include <base/defines.h>
|
||||||
|
|
||||||
|
// NOLINTBEGIN(google-runtime-int)
|
||||||
|
|
||||||
namespace common
|
namespace common
|
||||||
{
|
{
|
||||||
@ -206,3 +207,5 @@ namespace common
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NOLINTEND(google-runtime-int)
|
||||||
|
@ -1,27 +1,39 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <string.h>
|
#include <cstring>
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <type_traits>
|
#include <type_traits>
|
||||||
|
|
||||||
|
|
||||||
/** \brief Returns value `from` converted to type `To` while retaining bit representation.
|
/** Returns value `from` converted to type `To` while retaining bit representation.
|
||||||
* `To` and `From` must satisfy `CopyConstructible`.
|
* `To` and `From` must satisfy `CopyConstructible`.
|
||||||
|
*
|
||||||
|
* In contrast to std::bit_cast can cast types of different width.
|
||||||
|
*
|
||||||
|
* Note: for signed types of narrower size, the casted result is zero-extended
|
||||||
|
* instead of sign-extended as with regular static_cast.
|
||||||
|
* For example, -1 Int8 (represented as 0xFF) bit_casted to UInt64
|
||||||
|
* gives 255 (represented as 0x00000000000000FF) instead of 0xFFFFFFFFFFFFFFFF
|
||||||
*/
|
*/
|
||||||
template <typename To, typename From>
|
template <typename To, typename From>
|
||||||
std::decay_t<To> bit_cast(const From & from)
|
std::decay_t<To> bit_cast(const From & from)
|
||||||
{
|
{
|
||||||
|
/** Assume the source value is 0xAABBCCDD (i.e. sizeof(from) == 4).
|
||||||
|
* Its BE representation is 0xAABBCCDD, the LE representation is 0xDDCCBBAA.
|
||||||
|
* Further assume, sizeof(res) == 8 and that res is initially zeroed out.
|
||||||
|
* With LE, the result after bit_cast will be 0xDDCCBBAA00000000 --> input value == output value.
|
||||||
|
* With BE, the result after bit_cast will be 0x00000000AABBCCDD --> input value == output value.
|
||||||
|
*/
|
||||||
To res {};
|
To res {};
|
||||||
memcpy(static_cast<void*>(&res), &from, std::min(sizeof(res), sizeof(from)));
|
if constexpr (std::endian::native == std::endian::little)
|
||||||
|
{
|
||||||
|
memcpy(static_cast<void*>(&res), &from, std::min(sizeof(res), sizeof(from)));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
uint32_t offset_to = (sizeof(res) > sizeof(from)) ? (sizeof(res) - sizeof(from)) : 0;
|
||||||
|
uint32_t offset_from = (sizeof(from) > sizeof(res)) ? (sizeof(from) - sizeof(res)) : 0;
|
||||||
|
memcpy(reinterpret_cast<char *>(&res) + offset_to, reinterpret_cast<const char *>(&from) + offset_from, std::min(sizeof(res), sizeof(from)));
|
||||||
|
}
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** \brief Returns value `from` converted to type `To` while retaining bit representation.
|
|
||||||
* `To` and `From` must satisfy `CopyConstructible`.
|
|
||||||
*/
|
|
||||||
template <typename To, typename From>
|
|
||||||
std::decay_t<To> safe_bit_cast(const From & from)
|
|
||||||
{
|
|
||||||
static_assert(sizeof(To) == sizeof(From), "bit cast on types of different width");
|
|
||||||
return bit_cast<To, From>(from);
|
|
||||||
}
|
|
||||||
|
@ -1,9 +1,15 @@
|
|||||||
#include "coverage.h"
|
#include "coverage.h"
|
||||||
|
|
||||||
|
#pragma GCC diagnostic ignored "-Wreserved-identifier"
|
||||||
|
|
||||||
|
|
||||||
|
/// WITH_COVERAGE enables the default implementation of code coverage,
|
||||||
|
/// that dumps a map to the filesystem.
|
||||||
|
|
||||||
#if WITH_COVERAGE
|
#if WITH_COVERAGE
|
||||||
|
|
||||||
# include <mutex>
|
#include <mutex>
|
||||||
# include <unistd.h>
|
#include <unistd.h>
|
||||||
|
|
||||||
|
|
||||||
# if defined(__clang__)
|
# if defined(__clang__)
|
||||||
@ -29,3 +35,131 @@ void dumpCoverageReportIfPossible()
|
|||||||
|
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// SANITIZE_COVERAGE enables code instrumentation,
|
||||||
|
/// but leaves the callbacks implementation to us,
|
||||||
|
/// which we use to calculate coverage on a per-test basis
|
||||||
|
/// and to write it to system tables.
|
||||||
|
|
||||||
|
#if defined(SANITIZE_COVERAGE)
|
||||||
|
|
||||||
|
namespace
|
||||||
|
{
|
||||||
|
bool pc_guards_initialized = false;
|
||||||
|
bool pc_table_initialized = false;
|
||||||
|
|
||||||
|
uint32_t * guards_start = nullptr;
|
||||||
|
uint32_t * guards_end = nullptr;
|
||||||
|
|
||||||
|
uintptr_t * coverage_array = nullptr;
|
||||||
|
size_t coverage_array_size = 0;
|
||||||
|
|
||||||
|
uintptr_t * all_addresses_array = nullptr;
|
||||||
|
size_t all_addresses_array_size = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C"
|
||||||
|
{
|
||||||
|
|
||||||
|
/// This is called at least once for every DSO for initialization.
|
||||||
|
/// But we will use it only for the main DSO.
|
||||||
|
void __sanitizer_cov_trace_pc_guard_init(uint32_t * start, uint32_t * stop)
|
||||||
|
{
|
||||||
|
if (pc_guards_initialized)
|
||||||
|
return;
|
||||||
|
pc_guards_initialized = true;
|
||||||
|
|
||||||
|
/// The function can be called multiple times, but we need to initialize only once.
|
||||||
|
if (start == stop || *start)
|
||||||
|
return;
|
||||||
|
|
||||||
|
guards_start = start;
|
||||||
|
guards_end = stop;
|
||||||
|
coverage_array_size = stop - start;
|
||||||
|
|
||||||
|
/// Note: we will leak this.
|
||||||
|
coverage_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
|
||||||
|
|
||||||
|
resetCoverage();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This is called at least once for every DSO for initialization
|
||||||
|
/// and provides information about all instrumented addresses.
|
||||||
|
void __sanitizer_cov_pcs_init(const uintptr_t * pcs_begin, const uintptr_t * pcs_end)
|
||||||
|
{
|
||||||
|
if (pc_table_initialized)
|
||||||
|
return;
|
||||||
|
pc_table_initialized = true;
|
||||||
|
|
||||||
|
all_addresses_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
|
||||||
|
all_addresses_array_size = pcs_end - pcs_begin;
|
||||||
|
|
||||||
|
/// They are not a real pointers, but also contain a flag in the most significant bit,
|
||||||
|
/// in which we are not interested for now. Reset it.
|
||||||
|
for (size_t i = 0; i < all_addresses_array_size; ++i)
|
||||||
|
all_addresses_array[i] = pcs_begin[i] & 0x7FFFFFFFFFFFFFFFULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This is called at every basic block / edge, etc.
|
||||||
|
void __sanitizer_cov_trace_pc_guard(uint32_t * guard)
|
||||||
|
{
|
||||||
|
/// Duplicate the guard check.
|
||||||
|
if (!*guard)
|
||||||
|
return;
|
||||||
|
*guard = 0;
|
||||||
|
|
||||||
|
/// If you set *guard to 0 this code will not be called again for this edge.
|
||||||
|
/// Now we can get the PC and do whatever you want:
|
||||||
|
/// - store it somewhere or symbolize it and print right away.
|
||||||
|
/// The values of `*guard` are as you set them in
|
||||||
|
/// __sanitizer_cov_trace_pc_guard_init and so you can make them consecutive
|
||||||
|
/// and use them to dereference an array or a bit vector.
|
||||||
|
void * pc = __builtin_return_address(0);
|
||||||
|
|
||||||
|
coverage_array[guard - guards_start] = reinterpret_cast<uintptr_t>(pc);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
__attribute__((no_sanitize("coverage")))
|
||||||
|
std::span<const uintptr_t> getCoverage()
|
||||||
|
{
|
||||||
|
return {coverage_array, coverage_array_size};
|
||||||
|
}
|
||||||
|
|
||||||
|
__attribute__((no_sanitize("coverage")))
|
||||||
|
std::span<const uintptr_t> getAllInstrumentedAddresses()
|
||||||
|
{
|
||||||
|
return {all_addresses_array, all_addresses_array_size};
|
||||||
|
}
|
||||||
|
|
||||||
|
__attribute__((no_sanitize("coverage")))
|
||||||
|
void resetCoverage()
|
||||||
|
{
|
||||||
|
memset(coverage_array, 0, coverage_array_size * sizeof(*coverage_array));
|
||||||
|
|
||||||
|
/// The guard defines whether the __sanitizer_cov_trace_pc_guard should be called.
|
||||||
|
/// For example, you can unset it after first invocation to prevent excessive work.
|
||||||
|
/// Initially set all the guards to 1 to enable callbacks.
|
||||||
|
for (uint32_t * x = guards_start; x < guards_end; ++x)
|
||||||
|
*x = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
#else
|
||||||
|
|
||||||
|
std::span<const uintptr_t> getCoverage()
|
||||||
|
{
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::span<const uintptr_t> getAllInstrumentedAddresses()
|
||||||
|
{
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
void resetCoverage()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
#endif
|
||||||
|
@ -1,5 +1,8 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
|
#include <span>
|
||||||
|
#include <cstdint>
|
||||||
|
|
||||||
/// Flush coverage report to file, depending on coverage system
|
/// Flush coverage report to file, depending on coverage system
|
||||||
/// proposed by compiler (llvm for clang and gcov for gcc).
|
/// proposed by compiler (llvm for clang and gcov for gcc).
|
||||||
///
|
///
|
||||||
@ -7,3 +10,16 @@
|
|||||||
/// Thread safe (use exclusive lock).
|
/// Thread safe (use exclusive lock).
|
||||||
/// Idempotent, may be called multiple times.
|
/// Idempotent, may be called multiple times.
|
||||||
void dumpCoverageReportIfPossible();
|
void dumpCoverageReportIfPossible();
|
||||||
|
|
||||||
|
/// This is effective if SANITIZE_COVERAGE is enabled at build time.
|
||||||
|
/// Get accumulated unique program addresses of the instrumented parts of the code,
|
||||||
|
/// seen so far after program startup or after previous reset.
|
||||||
|
/// The returned span will be represented as a sparse map, containing mostly zeros, which you should filter away.
|
||||||
|
std::span<const uintptr_t> getCoverage();
|
||||||
|
|
||||||
|
/// Get all instrumented addresses that could be in the coverage.
|
||||||
|
std::span<const uintptr_t> getAllInstrumentedAddresses();
|
||||||
|
|
||||||
|
/// Reset the accumulated coverage.
|
||||||
|
/// This is useful to compare coverage of different tests, including differential coverage.
|
||||||
|
void resetCoverage();
|
||||||
|
26
base/base/crc32c_s390x.h
Normal file
26
base/base/crc32c_s390x.h
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <crc32-s390x.h>
|
||||||
|
|
||||||
|
inline uint32_t s390x_crc32c_u8(uint32_t crc, uint8_t v)
|
||||||
|
{
|
||||||
|
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline uint32_t s390x_crc32c_u16(uint32_t crc, uint16_t v)
|
||||||
|
{
|
||||||
|
v = __builtin_bswap16(v);
|
||||||
|
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline uint32_t s390x_crc32c_u32(uint32_t crc, uint32_t v)
|
||||||
|
{
|
||||||
|
v = __builtin_bswap32(v);
|
||||||
|
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline uint64_t s390x_crc32c(uint64_t crc, uint64_t v)
|
||||||
|
{
|
||||||
|
v = __builtin_bswap64(v);
|
||||||
|
return crc32c_le_vx(static_cast<uint32_t>(crc), reinterpret_cast<unsigned char *>(&v), sizeof(uint64_t));
|
||||||
|
}
|
@ -28,8 +28,8 @@
|
|||||||
#define NO_INLINE __attribute__((__noinline__))
|
#define NO_INLINE __attribute__((__noinline__))
|
||||||
#define MAY_ALIAS __attribute__((__may_alias__))
|
#define MAY_ALIAS __attribute__((__may_alias__))
|
||||||
|
|
||||||
#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__) && !(defined(__riscv) && (__riscv_xlen == 64))
|
#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__) && !defined(__s390x__) && !(defined(__riscv) && (__riscv_xlen == 64))
|
||||||
# error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress) and RISC-V 64 (experimental)"
|
# error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress), s390x (work in progress) and RISC-V 64 (experimental)"
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/// Check for presence of address sanitizer
|
/// Check for presence of address sanitizer
|
||||||
@ -73,18 +73,6 @@
|
|||||||
# endif
|
# endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if defined(ADDRESS_SANITIZER)
|
|
||||||
# define BOOST_USE_ASAN 1
|
|
||||||
# define BOOST_USE_UCONTEXT 1
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#if defined(THREAD_SANITIZER)
|
|
||||||
# define BOOST_USE_TSAN 1
|
|
||||||
# define BOOST_USE_UCONTEXT 1
|
|
||||||
#endif
|
|
||||||
|
|
||||||
/// TODO: Strange enough, there is no way to detect UB sanitizer.
|
|
||||||
|
|
||||||
/// Explicitly allow undefined behaviour for certain functions. Use it as a function attribute.
|
/// Explicitly allow undefined behaviour for certain functions. Use it as a function attribute.
|
||||||
/// It is useful in case when compiler cannot see (and exploit) it, but UBSan can.
|
/// It is useful in case when compiler cannot see (and exploit) it, but UBSan can.
|
||||||
/// Example: multiplication of signed integers with possibility of overflow when both sides are from user input.
|
/// Example: multiplication of signed integers with possibility of overflow when both sides are from user input.
|
||||||
@ -100,13 +88,104 @@
|
|||||||
# define ALWAYS_INLINE_NO_SANITIZE_UNDEFINED ALWAYS_INLINE
|
# define ALWAYS_INLINE_NO_SANITIZE_UNDEFINED ALWAYS_INLINE
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
#if defined(__clang__) && defined(__clang_major__) && __clang_major__ >= 14
|
||||||
|
# define DISABLE_SANITIZER_INSTRUMENTATION __attribute__((disable_sanitizer_instrumentation))
|
||||||
|
#else
|
||||||
|
# define DISABLE_SANITIZER_INSTRUMENTATION
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
#if !__has_include(<sanitizer/asan_interface.h>) || !defined(ADDRESS_SANITIZER)
|
#if !__has_include(<sanitizer/asan_interface.h>) || !defined(ADDRESS_SANITIZER)
|
||||||
# define ASAN_UNPOISON_MEMORY_REGION(a, b)
|
# define ASAN_UNPOISON_MEMORY_REGION(a, b)
|
||||||
# define ASAN_POISON_MEMORY_REGION(a, b)
|
# define ASAN_POISON_MEMORY_REGION(a, b)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
#if !defined(ABORT_ON_LOGICAL_ERROR)
|
||||||
|
#if !defined(NDEBUG) || defined(ADDRESS_SANITIZER) || defined(THREAD_SANITIZER) || defined(MEMORY_SANITIZER) || defined(UNDEFINED_BEHAVIOR_SANITIZER)
|
||||||
|
#define ABORT_ON_LOGICAL_ERROR
|
||||||
|
#endif
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/// chassert(x) is similar to assert(x), but:
|
||||||
|
/// - works in builds with sanitizers, not only in debug builds
|
||||||
|
/// - tries to print failed assertion into server log
|
||||||
|
/// It can be used for all assertions except heavy ones.
|
||||||
|
/// Heavy assertions (that run loops or call complex functions) are allowed in debug builds only.
|
||||||
|
/// Also it makes sense to call abort() instead of __builtin_unreachable() in debug builds,
|
||||||
|
/// because SIGABRT is easier to debug than SIGTRAP (the second one makes gdb crazy)
|
||||||
|
#if !defined(chassert)
|
||||||
|
#if defined(ABORT_ON_LOGICAL_ERROR)
|
||||||
|
// clang-format off
|
||||||
|
#include <base/types.h>
|
||||||
|
namespace DB
|
||||||
|
{
|
||||||
|
[[noreturn]] void abortOnFailedAssertion(const String & description);
|
||||||
|
}
|
||||||
|
#define chassert(x) do { static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x); } while (0)
|
||||||
|
#define UNREACHABLE() abort()
|
||||||
|
// clang-format off
|
||||||
|
#else
|
||||||
|
/// Here sizeof() trick is used to suppress unused warning for result,
|
||||||
|
/// since simple "(void)x" will evaluate the expression, while
|
||||||
|
/// "sizeof(!(x))" will not.
|
||||||
|
#define chassert(x) (void)sizeof(!(x))
|
||||||
|
#define UNREACHABLE() __builtin_unreachable()
|
||||||
|
#endif
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/// Macros for Clang Thread Safety Analysis (TSA). They can be safely ignored by other compilers.
|
||||||
|
/// Feel free to extend, but please stay close to https://clang.llvm.org/docs/ThreadSafetyAnalysis.html#mutexheader
|
||||||
|
#if defined(__clang__)
|
||||||
|
# define TSA_GUARDED_BY(...) __attribute__((guarded_by(__VA_ARGS__))) /// data is protected by given capability
|
||||||
|
# define TSA_PT_GUARDED_BY(...) __attribute__((pt_guarded_by(__VA_ARGS__))) /// pointed-to data is protected by the given capability
|
||||||
|
# define TSA_REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__))) /// thread needs exclusive possession of given capability
|
||||||
|
# define TSA_REQUIRES_SHARED(...) __attribute__((requires_shared_capability(__VA_ARGS__))) /// thread needs shared possession of given capability
|
||||||
|
# define TSA_ACQUIRED_AFTER(...) __attribute__((acquired_after(__VA_ARGS__))) /// annotated lock must be locked after given lock
|
||||||
|
# define TSA_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis)) /// disable TSA for a function
|
||||||
|
# define TSA_CAPABILITY(...) __attribute__((capability(__VA_ARGS__))) /// object of a class can be used as capability
|
||||||
|
# define TSA_ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__))) /// function acquires a capability, but does not release it
|
||||||
|
# define TSA_TRY_ACQUIRE(...) __attribute__((try_acquire_capability(__VA_ARGS__))) /// function tries to acquire a capability and returns a boolean value indicating success or failure
|
||||||
|
# define TSA_RELEASE(...) __attribute__((release_capability(__VA_ARGS__))) /// function releases the given capability
|
||||||
|
# define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
|
||||||
|
# define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
|
||||||
|
# define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
|
||||||
|
# define TSA_SCOPED_LOCKABLE __attribute__((scoped_lockable)) /// object of a class has scoped lockable capability
|
||||||
|
|
||||||
|
/// Macros for suppressing TSA warnings for specific reads/writes (instead of suppressing it for the whole function)
|
||||||
|
/// They use a lambda function to apply function attribute to a single statement. This enable us to suppress warnings locally instead of
|
||||||
|
/// suppressing them in the whole function
|
||||||
|
/// Consider adding a comment when using these macros.
|
||||||
|
# define TSA_SUPPRESS_WARNING_FOR_READ(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> const auto & { return (x); }())
|
||||||
|
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> auto & { return (x); }())
|
||||||
|
|
||||||
|
/// This macro is useful when only one thread writes to a member
|
||||||
|
/// and you want to read this member from the same thread without locking a mutex.
|
||||||
|
/// It's safe (because no concurrent writes are possible), but TSA generates a warning.
|
||||||
|
/// (Seems like there's no way to verify it, but it makes sense to distinguish it from TSA_SUPPRESS_WARNING_FOR_READ for readability)
|
||||||
|
# define TSA_READ_ONE_THREAD(x) TSA_SUPPRESS_WARNING_FOR_READ(x)
|
||||||
|
|
||||||
|
#else
|
||||||
|
# define TSA_GUARDED_BY(...)
|
||||||
|
# define TSA_PT_GUARDED_BY(...)
|
||||||
|
# define TSA_REQUIRES(...)
|
||||||
|
# define TSA_REQUIRES_SHARED(...)
|
||||||
|
# define TSA_NO_THREAD_SAFETY_ANALYSIS
|
||||||
|
# define TSA_CAPABILITY(...)
|
||||||
|
# define TSA_ACQUIRE(...)
|
||||||
|
# define TSA_TRY_ACQUIRE(...)
|
||||||
|
# define TSA_RELEASE(...)
|
||||||
|
# define TSA_ACQUIRE_SHARED(...)
|
||||||
|
# define TSA_TRY_ACQUIRE_SHARED(...)
|
||||||
|
# define TSA_RELEASE_SHARED(...)
|
||||||
|
# define TSA_SCOPED_LOCKABLE
|
||||||
|
|
||||||
|
# define TSA_SUPPRESS_WARNING_FOR_READ(x) (x)
|
||||||
|
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) (x)
|
||||||
|
# define TSA_READ_ONE_THREAD(x) TSA_SUPPRESS_WARNING_FOR_READ(x)
|
||||||
|
#endif
|
||||||
|
|
||||||
/// A template function for suppressing warnings about unused variables or function results.
|
/// A template function for suppressing warnings about unused variables or function results.
|
||||||
template <typename... Args>
|
template <typename... Args>
|
||||||
constexpr void UNUSED(Args &&... args [[maybe_unused]])
|
constexpr void UNUSED(Args &&... args [[maybe_unused]]) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
#include <base/demangle.h>
|
#include <base/demangle.h>
|
||||||
|
|
||||||
#include <stdlib.h>
|
#include <cstdlib>
|
||||||
#include <cxxabi.h>
|
#include <cxxabi.h>
|
||||||
|
|
||||||
static DemangleResult tryDemangle(const char * name, int & status)
|
static DemangleResult tryDemangle(const char * name, int & status)
|
||||||
|
@ -27,6 +27,6 @@ struct FreeingDeleter
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
typedef std::unique_ptr<char, FreeingDeleter> DemangleResult;
|
using DemangleResult = std::unique_ptr<char, FreeingDeleter>;
|
||||||
|
|
||||||
DemangleResult tryDemangle(const char * name);
|
DemangleResult tryDemangle(const char * name);
|
||||||
|
@ -3,19 +3,20 @@
|
|||||||
#include <fmt/format.h>
|
#include <fmt/format.h>
|
||||||
|
|
||||||
|
|
||||||
std::string errnoToString(int code, int the_errno)
|
std::string errnoToString(int the_errno)
|
||||||
{
|
{
|
||||||
const size_t buf_size = 128;
|
const size_t buf_size = 128;
|
||||||
char buf[buf_size];
|
char buf[buf_size];
|
||||||
|
|
||||||
#ifndef _GNU_SOURCE
|
#ifndef _GNU_SOURCE
|
||||||
int rc = strerror_r(the_errno, buf, buf_size);
|
int rc = strerror_r(the_errno, buf, buf_size);
|
||||||
#ifdef __APPLE__
|
#ifdef OS_DARWIN
|
||||||
if (rc != 0 && rc != EINVAL)
|
if (rc != 0 && rc != EINVAL)
|
||||||
#else
|
#else
|
||||||
if (rc != 0)
|
if (rc != 0)
|
||||||
#endif
|
#endif
|
||||||
{
|
{
|
||||||
std::string tmp = std::to_string(code);
|
std::string tmp = std::to_string(the_errno);
|
||||||
const char * code_str = tmp.c_str();
|
const char * code_str = tmp.c_str();
|
||||||
const char * unknown_message = "Unknown error ";
|
const char * unknown_message = "Unknown error ";
|
||||||
strcpy(buf, unknown_message);
|
strcpy(buf, unknown_message);
|
||||||
@ -23,7 +24,6 @@ std::string errnoToString(int code, int the_errno)
|
|||||||
}
|
}
|
||||||
return fmt::format("errno: {}, strerror: {}", the_errno, buf);
|
return fmt::format("errno: {}, strerror: {}", the_errno, buf);
|
||||||
#else
|
#else
|
||||||
(void)code;
|
|
||||||
return fmt::format("errno: {}, strerror: {}", the_errno, strerror_r(the_errno, buf, sizeof(buf)));
|
return fmt::format("errno: {}, strerror: {}", the_errno, strerror_r(the_errno, buf, sizeof(buf)));
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@ -3,4 +3,4 @@
|
|||||||
#include <cerrno>
|
#include <cerrno>
|
||||||
#include <string>
|
#include <string>
|
||||||
|
|
||||||
std::string errnoToString(int code, int the_errno = errno);
|
std::string errnoToString(int the_errno = errno);
|
||||||
|
@ -5,7 +5,6 @@
|
|||||||
#include <base/types.h>
|
#include <base/types.h>
|
||||||
#include <base/wide_integer.h>
|
#include <base/wide_integer.h>
|
||||||
|
|
||||||
|
|
||||||
using Int128 = wide::integer<128, signed>;
|
using Int128 = wide::integer<128, signed>;
|
||||||
using UInt128 = wide::integer<128, unsigned>;
|
using UInt128 = wide::integer<128, unsigned>;
|
||||||
using Int256 = wide::integer<256, signed>;
|
using Int256 = wide::integer<256, signed>;
|
||||||
@ -18,7 +17,7 @@ static_assert(sizeof(UInt256) == 32);
|
|||||||
/// (std::common_type), are "set in stone". Attempting to specialize them causes undefined behavior.
|
/// (std::common_type), are "set in stone". Attempting to specialize them causes undefined behavior.
|
||||||
/// So instead of using the std type_traits, we use our own version which allows extension.
|
/// So instead of using the std type_traits, we use our own version which allows extension.
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct is_signed
|
struct is_signed // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
static constexpr bool value = std::is_signed_v<T>;
|
static constexpr bool value = std::is_signed_v<T>;
|
||||||
};
|
};
|
||||||
@ -30,7 +29,7 @@ template <typename T>
|
|||||||
inline constexpr bool is_signed_v = is_signed<T>::value;
|
inline constexpr bool is_signed_v = is_signed<T>::value;
|
||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct is_unsigned
|
struct is_unsigned // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
static constexpr bool value = std::is_unsigned_v<T>;
|
static constexpr bool value = std::is_unsigned_v<T>;
|
||||||
};
|
};
|
||||||
@ -51,7 +50,7 @@ template <class T> concept is_integer =
|
|||||||
template <class T> concept is_floating_point = std::is_floating_point_v<T>;
|
template <class T> concept is_floating_point = std::is_floating_point_v<T>;
|
||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct is_arithmetic
|
struct is_arithmetic // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
static constexpr bool value = std::is_arithmetic_v<T>;
|
static constexpr bool value = std::is_arithmetic_v<T>;
|
||||||
};
|
};
|
||||||
@ -66,9 +65,9 @@ template <typename T>
|
|||||||
inline constexpr bool is_arithmetic_v = is_arithmetic<T>::value;
|
inline constexpr bool is_arithmetic_v = is_arithmetic<T>::value;
|
||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct make_unsigned
|
struct make_unsigned // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
typedef std::make_unsigned_t<T> type;
|
using type = std::make_unsigned_t<T>;
|
||||||
};
|
};
|
||||||
|
|
||||||
template <> struct make_unsigned<Int128> { using type = UInt128; };
|
template <> struct make_unsigned<Int128> { using type = UInt128; };
|
||||||
@ -79,9 +78,9 @@ template <> struct make_unsigned<UInt256> { using type = UInt256; };
|
|||||||
template <typename T> using make_unsigned_t = typename make_unsigned<T>::type;
|
template <typename T> using make_unsigned_t = typename make_unsigned<T>::type;
|
||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct make_signed
|
struct make_signed // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
typedef std::make_signed_t<T> type;
|
using type = std::make_signed_t<T>;
|
||||||
};
|
};
|
||||||
|
|
||||||
template <> struct make_signed<Int128> { using type = Int128; };
|
template <> struct make_signed<Int128> { using type = Int128; };
|
||||||
@ -92,7 +91,7 @@ template <> struct make_signed<UInt256> { using type = Int256; };
|
|||||||
template <typename T> using make_signed_t = typename make_signed<T>::type;
|
template <typename T> using make_signed_t = typename make_signed<T>::type;
|
||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
struct is_big_int
|
struct is_big_int // NOLINT(readability-identifier-naming)
|
||||||
{
|
{
|
||||||
static constexpr bool value = false;
|
static constexpr bool value = false;
|
||||||
};
|
};
|
||||||
@ -104,4 +103,3 @@ template <> struct is_big_int<UInt256> { static constexpr bool value = true; };
|
|||||||
|
|
||||||
template <typename T>
|
template <typename T>
|
||||||
inline constexpr bool is_big_int_v = is_big_int<T>::value;
|
inline constexpr bool is_big_int_v = is_big_int<T>::value;
|
||||||
|
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
#include <cstdint>
|
#include <cstdint>
|
||||||
#include <string>
|
#include <string>
|
||||||
|
#include <array>
|
||||||
|
|
||||||
#if defined(__SSE2__)
|
#if defined(__SSE2__)
|
||||||
#include <emmintrin.h>
|
#include <emmintrin.h>
|
||||||
@ -15,7 +16,7 @@
|
|||||||
*
|
*
|
||||||
* Allow to search for next character from the set of 'symbols...' in a string.
|
* Allow to search for next character from the set of 'symbols...' in a string.
|
||||||
* It is similar to 'strpbrk', 'strcspn' (and 'strchr', 'memchr' in the case of one symbol and '\0'),
|
* It is similar to 'strpbrk', 'strcspn' (and 'strchr', 'memchr' in the case of one symbol and '\0'),
|
||||||
* but with the following differencies:
|
* but with the following differences:
|
||||||
* - works with any memory ranges, including containing zero bytes;
|
* - works with any memory ranges, including containing zero bytes;
|
||||||
* - doesn't require terminating zero byte: end of memory range is passed explicitly;
|
* - doesn't require terminating zero byte: end of memory range is passed explicitly;
|
||||||
* - if not found, returns pointer to end instead of nullptr;
|
* - if not found, returns pointer to end instead of nullptr;
|
||||||
@ -34,9 +35,51 @@
|
|||||||
* If no such characters, returns nullptr.
|
* If no such characters, returns nullptr.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
struct SearchSymbols
|
||||||
|
{
|
||||||
|
static constexpr auto BUFFER_SIZE = 16;
|
||||||
|
|
||||||
|
SearchSymbols() = default;
|
||||||
|
|
||||||
|
explicit SearchSymbols(std::string in)
|
||||||
|
: str(std::move(in))
|
||||||
|
{
|
||||||
|
#if defined(__SSE4_2__)
|
||||||
|
if (str.size() > BUFFER_SIZE)
|
||||||
|
{
|
||||||
|
throw std::runtime_error("SearchSymbols can contain at most " + std::to_string(BUFFER_SIZE) + " symbols and " + std::to_string(str.size()) + " was provided\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
char tmp_safety_buffer[BUFFER_SIZE] = {0};
|
||||||
|
|
||||||
|
memcpy(tmp_safety_buffer, str.data(), str.size());
|
||||||
|
|
||||||
|
simd_vector = _mm_loadu_si128(reinterpret_cast<const __m128i *>(tmp_safety_buffer));
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
#if defined(__SSE4_2__)
|
||||||
|
__m128i simd_vector;
|
||||||
|
#endif
|
||||||
|
std::string str;
|
||||||
|
};
|
||||||
|
|
||||||
namespace detail
|
namespace detail
|
||||||
{
|
{
|
||||||
template <char ...chars> constexpr bool is_in(char x) { return ((x == chars) || ...); }
|
template <char ...chars> constexpr bool is_in(char x) { return ((x == chars) || ...); } // NOLINT(misc-redundant-expression)
|
||||||
|
|
||||||
|
static bool is_in(char c, const char * symbols, size_t num_chars)
|
||||||
|
{
|
||||||
|
for (size_t i = 0u; i < num_chars; ++i)
|
||||||
|
{
|
||||||
|
if (c == symbols[i])
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
#if defined(__SSE2__)
|
#if defined(__SSE2__)
|
||||||
template <char s0>
|
template <char s0>
|
||||||
@ -53,6 +96,43 @@ inline __m128i mm_is_in(__m128i bytes)
|
|||||||
__m128i eq = mm_is_in<s1, tail...>(bytes);
|
__m128i eq = mm_is_in<s1, tail...>(bytes);
|
||||||
return _mm_or_si128(eq0, eq);
|
return _mm_or_si128(eq0, eq);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inline __m128i mm_is_in(__m128i bytes, const char * symbols, size_t num_chars)
|
||||||
|
{
|
||||||
|
__m128i accumulator = _mm_setzero_si128();
|
||||||
|
for (size_t i = 0; i < num_chars; ++i)
|
||||||
|
{
|
||||||
|
__m128i eq = _mm_cmpeq_epi8(bytes, _mm_set1_epi8(symbols[i]));
|
||||||
|
accumulator = _mm_or_si128(accumulator, eq);
|
||||||
|
}
|
||||||
|
|
||||||
|
return accumulator;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline std::array<__m128i, 16u> mm_is_in_prepare(const char * symbols, size_t num_chars)
|
||||||
|
{
|
||||||
|
std::array<__m128i, 16u> result {};
|
||||||
|
|
||||||
|
for (size_t i = 0; i < num_chars; ++i)
|
||||||
|
{
|
||||||
|
result[i] = _mm_set1_epi8(symbols[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
inline __m128i mm_is_in_execute(__m128i bytes, const std::array<__m128i, 16u> & needles)
|
||||||
|
{
|
||||||
|
__m128i accumulator = _mm_setzero_si128();
|
||||||
|
|
||||||
|
for (const auto & needle : needles)
|
||||||
|
{
|
||||||
|
__m128i eq = _mm_cmpeq_epi8(bytes, needle);
|
||||||
|
accumulator = _mm_or_si128(accumulator, eq);
|
||||||
|
}
|
||||||
|
|
||||||
|
return accumulator;
|
||||||
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
template <bool positive>
|
template <bool positive>
|
||||||
@ -99,6 +179,32 @@ inline const char * find_first_symbols_sse2(const char * const begin, const char
|
|||||||
return return_mode == ReturnMode::End ? end : nullptr;
|
return return_mode == ReturnMode::End ? end : nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template <bool positive, ReturnMode return_mode>
|
||||||
|
inline const char * find_first_symbols_sse2(const char * const begin, const char * const end, const char * symbols, size_t num_chars)
|
||||||
|
{
|
||||||
|
const char * pos = begin;
|
||||||
|
|
||||||
|
#if defined(__SSE2__)
|
||||||
|
const auto needles = mm_is_in_prepare(symbols, num_chars);
|
||||||
|
for (; pos + 15 < end; pos += 16)
|
||||||
|
{
|
||||||
|
__m128i bytes = _mm_loadu_si128(reinterpret_cast<const __m128i *>(pos));
|
||||||
|
|
||||||
|
__m128i eq = mm_is_in_execute(bytes, needles);
|
||||||
|
|
||||||
|
uint16_t bit_mask = maybe_negate<positive>(uint16_t(_mm_movemask_epi8(eq)));
|
||||||
|
if (bit_mask)
|
||||||
|
return pos + __builtin_ctz(bit_mask);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
for (; pos < end; ++pos)
|
||||||
|
if (maybe_negate<positive>(is_in(*pos, symbols, num_chars)))
|
||||||
|
return pos;
|
||||||
|
|
||||||
|
return return_mode == ReturnMode::End ? end : nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
template <bool positive, ReturnMode return_mode, char... symbols>
|
template <bool positive, ReturnMode return_mode, char... symbols>
|
||||||
inline const char * find_last_symbols_sse2(const char * const begin, const char * const end)
|
inline const char * find_last_symbols_sse2(const char * const begin, const char * const end)
|
||||||
@ -159,26 +265,61 @@ inline const char * find_first_symbols_sse42(const char * const begin, const cha
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
for (; pos < end; ++pos)
|
for (; pos < end; ++pos)
|
||||||
if ( (num_chars >= 1 && maybe_negate<positive>(*pos == c01))
|
if ( (num_chars == 1 && maybe_negate<positive>(is_in<c01>(*pos)))
|
||||||
|| (num_chars >= 2 && maybe_negate<positive>(*pos == c02))
|
|| (num_chars == 2 && maybe_negate<positive>(is_in<c01, c02>(*pos)))
|
||||||
|| (num_chars >= 3 && maybe_negate<positive>(*pos == c03))
|
|| (num_chars == 3 && maybe_negate<positive>(is_in<c01, c02, c03>(*pos)))
|
||||||
|| (num_chars >= 4 && maybe_negate<positive>(*pos == c04))
|
|| (num_chars == 4 && maybe_negate<positive>(is_in<c01, c02, c03, c04>(*pos)))
|
||||||
|| (num_chars >= 5 && maybe_negate<positive>(*pos == c05))
|
|| (num_chars == 5 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05>(*pos)))
|
||||||
|| (num_chars >= 6 && maybe_negate<positive>(*pos == c06))
|
|| (num_chars == 6 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06>(*pos)))
|
||||||
|| (num_chars >= 7 && maybe_negate<positive>(*pos == c07))
|
|| (num_chars == 7 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07>(*pos)))
|
||||||
|| (num_chars >= 8 && maybe_negate<positive>(*pos == c08))
|
|| (num_chars == 8 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08>(*pos)))
|
||||||
|| (num_chars >= 9 && maybe_negate<positive>(*pos == c09))
|
|| (num_chars == 9 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09>(*pos)))
|
||||||
|| (num_chars >= 10 && maybe_negate<positive>(*pos == c10))
|
|| (num_chars == 10 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10>(*pos)))
|
||||||
|| (num_chars >= 11 && maybe_negate<positive>(*pos == c11))
|
|| (num_chars == 11 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11>(*pos)))
|
||||||
|| (num_chars >= 12 && maybe_negate<positive>(*pos == c12))
|
|| (num_chars == 12 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11, c12>(*pos)))
|
||||||
|| (num_chars >= 13 && maybe_negate<positive>(*pos == c13))
|
|| (num_chars == 13 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11, c12, c13>(*pos)))
|
||||||
|| (num_chars >= 14 && maybe_negate<positive>(*pos == c14))
|
|| (num_chars == 14 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11, c12, c13, c14>(*pos)))
|
||||||
|| (num_chars >= 15 && maybe_negate<positive>(*pos == c15))
|
|| (num_chars == 15 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11, c12, c13, c14, c15>(*pos)))
|
||||||
|| (num_chars >= 16 && maybe_negate<positive>(*pos == c16)))
|
|| (num_chars == 16 && maybe_negate<positive>(is_in<c01, c02, c03, c04, c05, c06, c07, c08, c09, c10, c11, c12, c13, c14, c15, c16>(*pos))))
|
||||||
return pos;
|
return pos;
|
||||||
return return_mode == ReturnMode::End ? end : nullptr;
|
return return_mode == ReturnMode::End ? end : nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template <bool positive, ReturnMode return_mode>
|
||||||
|
inline const char * find_first_symbols_sse42(const char * const begin, const char * const end, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
const char * pos = begin;
|
||||||
|
|
||||||
|
const auto num_chars = symbols.str.size();
|
||||||
|
|
||||||
|
#if defined(__SSE4_2__)
|
||||||
|
constexpr int mode = _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_LEAST_SIGNIFICANT;
|
||||||
|
|
||||||
|
const __m128i set = symbols.simd_vector;
|
||||||
|
|
||||||
|
for (; pos + 15 < end; pos += 16)
|
||||||
|
{
|
||||||
|
__m128i bytes = _mm_loadu_si128(reinterpret_cast<const __m128i *>(pos));
|
||||||
|
|
||||||
|
if constexpr (positive)
|
||||||
|
{
|
||||||
|
if (_mm_cmpestrc(set, num_chars, bytes, 16, mode))
|
||||||
|
return pos + _mm_cmpestri(set, num_chars, bytes, 16, mode);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (_mm_cmpestrc(set, num_chars, bytes, 16, mode | _SIDD_NEGATIVE_POLARITY))
|
||||||
|
return pos + _mm_cmpestri(set, num_chars, bytes, 16, mode | _SIDD_NEGATIVE_POLARITY);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
for (; pos < end; ++pos)
|
||||||
|
if (maybe_negate<positive>(is_in(*pos, symbols.str.data(), num_chars)))
|
||||||
|
return pos;
|
||||||
|
|
||||||
|
return return_mode == ReturnMode::End ? end : nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
/// NOTE No SSE 4.2 implementation for find_last_symbols_or_null. Not worth to do.
|
/// NOTE No SSE 4.2 implementation for find_last_symbols_or_null. Not worth to do.
|
||||||
|
|
||||||
@ -194,6 +335,17 @@ inline const char * find_first_symbols_dispatch(const char * begin, const char *
|
|||||||
return find_first_symbols_sse2<positive, return_mode, symbols...>(begin, end);
|
return find_first_symbols_sse2<positive, return_mode, symbols...>(begin, end);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template <bool positive, ReturnMode return_mode>
|
||||||
|
inline const char * find_first_symbols_dispatch(const std::string_view haystack, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
#if defined(__SSE4_2__)
|
||||||
|
if (symbols.str.size() >= 5)
|
||||||
|
return find_first_symbols_sse42<positive, return_mode>(haystack.begin(), haystack.end(), symbols);
|
||||||
|
else
|
||||||
|
#endif
|
||||||
|
return find_first_symbols_sse2<positive, return_mode>(haystack.begin(), haystack.end(), symbols.str.data(), symbols.str.size());
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -211,6 +363,11 @@ inline char * find_first_symbols(char * begin, char * end)
|
|||||||
return const_cast<char *>(detail::find_first_symbols_dispatch<true, detail::ReturnMode::End, symbols...>(begin, end));
|
return const_cast<char *>(detail::find_first_symbols_dispatch<true, detail::ReturnMode::End, symbols...>(begin, end));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inline const char * find_first_symbols(std::string_view haystack, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
return detail::find_first_symbols_dispatch<true, detail::ReturnMode::End>(haystack, symbols);
|
||||||
|
}
|
||||||
|
|
||||||
template <char... symbols>
|
template <char... symbols>
|
||||||
inline const char * find_first_not_symbols(const char * begin, const char * end)
|
inline const char * find_first_not_symbols(const char * begin, const char * end)
|
||||||
{
|
{
|
||||||
@ -223,6 +380,11 @@ inline char * find_first_not_symbols(char * begin, char * end)
|
|||||||
return const_cast<char *>(detail::find_first_symbols_dispatch<false, detail::ReturnMode::End, symbols...>(begin, end));
|
return const_cast<char *>(detail::find_first_symbols_dispatch<false, detail::ReturnMode::End, symbols...>(begin, end));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inline const char * find_first_not_symbols(std::string_view haystack, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
return detail::find_first_symbols_dispatch<false, detail::ReturnMode::End>(haystack, symbols);
|
||||||
|
}
|
||||||
|
|
||||||
template <char... symbols>
|
template <char... symbols>
|
||||||
inline const char * find_first_symbols_or_null(const char * begin, const char * end)
|
inline const char * find_first_symbols_or_null(const char * begin, const char * end)
|
||||||
{
|
{
|
||||||
@ -235,6 +397,11 @@ inline char * find_first_symbols_or_null(char * begin, char * end)
|
|||||||
return const_cast<char *>(detail::find_first_symbols_dispatch<true, detail::ReturnMode::Nullptr, symbols...>(begin, end));
|
return const_cast<char *>(detail::find_first_symbols_dispatch<true, detail::ReturnMode::Nullptr, symbols...>(begin, end));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inline const char * find_first_symbols_or_null(std::string_view haystack, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
return detail::find_first_symbols_dispatch<true, detail::ReturnMode::Nullptr>(haystack, symbols);
|
||||||
|
}
|
||||||
|
|
||||||
template <char... symbols>
|
template <char... symbols>
|
||||||
inline const char * find_first_not_symbols_or_null(const char * begin, const char * end)
|
inline const char * find_first_not_symbols_or_null(const char * begin, const char * end)
|
||||||
{
|
{
|
||||||
@ -247,6 +414,10 @@ inline char * find_first_not_symbols_or_null(char * begin, char * end)
|
|||||||
return const_cast<char *>(detail::find_first_symbols_dispatch<false, detail::ReturnMode::Nullptr, symbols...>(begin, end));
|
return const_cast<char *>(detail::find_first_symbols_dispatch<false, detail::ReturnMode::Nullptr, symbols...>(begin, end));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inline const char * find_first_not_symbols_or_null(std::string_view haystack, const SearchSymbols & symbols)
|
||||||
|
{
|
||||||
|
return detail::find_first_symbols_dispatch<false, detail::ReturnMode::Nullptr>(haystack, symbols);
|
||||||
|
}
|
||||||
|
|
||||||
template <char... symbols>
|
template <char... symbols>
|
||||||
inline const char * find_last_symbols_or_null(const char * begin, const char * end)
|
inline const char * find_last_symbols_or_null(const char * begin, const char * end)
|
||||||
@ -277,7 +448,7 @@ inline char * find_last_not_symbols_or_null(char * begin, char * end)
|
|||||||
/// See https://github.com/boostorg/algorithm/issues/63
|
/// See https://github.com/boostorg/algorithm/issues/63
|
||||||
/// And https://bugs.llvm.org/show_bug.cgi?id=41141
|
/// And https://bugs.llvm.org/show_bug.cgi?id=41141
|
||||||
template <char... symbols, typename To>
|
template <char... symbols, typename To>
|
||||||
inline void splitInto(To & to, const std::string & what, bool token_compress = false)
|
inline To & splitInto(To & to, std::string_view what, bool token_compress = false)
|
||||||
{
|
{
|
||||||
const char * pos = what.data();
|
const char * pos = what.data();
|
||||||
const char * end = pos + what.size();
|
const char * end = pos + what.size();
|
||||||
@ -293,4 +464,6 @@ inline void splitInto(To & to, const std::string & what, bool token_compress = f
|
|||||||
else
|
else
|
||||||
pos = delimiter_or_end;
|
pos = delimiter_or_end;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return to;
|
||||||
}
|
}
|
||||||
|
@ -16,7 +16,7 @@ uint64_t getAvailableMemoryAmountOrZero()
|
|||||||
{
|
{
|
||||||
#if defined(_SC_PHYS_PAGES) // linux
|
#if defined(_SC_PHYS_PAGES) // linux
|
||||||
return getPageSize() * sysconf(_SC_PHYS_PAGES);
|
return getPageSize() * sysconf(_SC_PHYS_PAGES);
|
||||||
#elif defined(__FreeBSD__)
|
#elif defined(OS_FREEBSD)
|
||||||
struct vmtotal vmt;
|
struct vmtotal vmt;
|
||||||
size_t vmt_size = sizeof(vmt);
|
size_t vmt_size = sizeof(vmt);
|
||||||
if (sysctlbyname("vm.vmtotal", &vmt, &vmt_size, NULL, 0) == 0)
|
if (sysctlbyname("vm.vmtotal", &vmt, &vmt_size, NULL, 0) == 0)
|
||||||
|
@ -28,14 +28,28 @@ uint64_t getMemoryAmountOrZero()
|
|||||||
|
|
||||||
#if defined(OS_LINUX)
|
#if defined(OS_LINUX)
|
||||||
// Try to lookup at the Cgroup limit
|
// Try to lookup at the Cgroup limit
|
||||||
std::ifstream cgroup_limit("/sys/fs/cgroup/memory/memory.limit_in_bytes");
|
|
||||||
if (cgroup_limit.is_open())
|
// CGroups v2
|
||||||
|
std::ifstream cgroupv2_limit("/sys/fs/cgroup/memory.max");
|
||||||
|
if (cgroupv2_limit.is_open())
|
||||||
{
|
{
|
||||||
uint64_t memory_limit = 0; // in case of read error
|
uint64_t memory_limit = 0;
|
||||||
cgroup_limit >> memory_limit;
|
cgroupv2_limit >> memory_limit;
|
||||||
if (memory_limit > 0 && memory_limit < memory_amount)
|
if (memory_limit > 0 && memory_limit < memory_amount)
|
||||||
memory_amount = memory_limit;
|
memory_amount = memory_limit;
|
||||||
}
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// CGroups v1
|
||||||
|
std::ifstream cgroup_limit("/sys/fs/cgroup/memory/memory.limit_in_bytes");
|
||||||
|
if (cgroup_limit.is_open())
|
||||||
|
{
|
||||||
|
uint64_t memory_limit = 0; // in case of read error
|
||||||
|
cgroup_limit >> memory_limit;
|
||||||
|
if (memory_limit > 0 && memory_limit < memory_amount)
|
||||||
|
memory_amount = memory_limit;
|
||||||
|
}
|
||||||
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
return memory_amount;
|
return memory_amount;
|
||||||
|
@ -15,25 +15,34 @@
|
|||||||
|
|
||||||
|
|
||||||
static thread_local uint64_t current_tid = 0;
|
static thread_local uint64_t current_tid = 0;
|
||||||
|
|
||||||
|
static void setCurrentThreadId()
|
||||||
|
{
|
||||||
|
#if defined(OS_ANDROID)
|
||||||
|
current_tid = gettid();
|
||||||
|
#elif defined(OS_LINUX)
|
||||||
|
current_tid = static_cast<uint64_t>(syscall(SYS_gettid)); /// This call is always successful. - man gettid
|
||||||
|
#elif defined(OS_FREEBSD)
|
||||||
|
current_tid = pthread_getthreadid_np();
|
||||||
|
#elif defined(OS_SUNOS)
|
||||||
|
// On Solaris-derived systems, this returns the ID of the LWP, analogous
|
||||||
|
// to a thread.
|
||||||
|
current_tid = static_cast<uint64_t>(pthread_self());
|
||||||
|
#else
|
||||||
|
if (0 != pthread_threadid_np(nullptr, ¤t_tid))
|
||||||
|
throw std::logic_error("pthread_threadid_np returned error");
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
uint64_t getThreadId()
|
uint64_t getThreadId()
|
||||||
{
|
{
|
||||||
if (!current_tid)
|
if (!current_tid)
|
||||||
{
|
setCurrentThreadId();
|
||||||
#if defined(OS_ANDROID)
|
|
||||||
current_tid = gettid();
|
|
||||||
#elif defined(OS_LINUX)
|
|
||||||
current_tid = syscall(SYS_gettid); /// This call is always successful. - man gettid
|
|
||||||
#elif defined(OS_FREEBSD)
|
|
||||||
current_tid = pthread_getthreadid_np();
|
|
||||||
#elif defined(OS_SUNOS)
|
|
||||||
// On Solaris-derived systems, this returns the ID of the LWP, analogous
|
|
||||||
// to a thread.
|
|
||||||
current_tid = static_cast<uint64_t>(pthread_self());
|
|
||||||
#else
|
|
||||||
if (0 != pthread_threadid_np(nullptr, ¤t_tid))
|
|
||||||
throw std::logic_error("pthread_threadid_np returned error");
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
return current_tid;
|
return current_tid;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void updateCurrentThreadIdAfterFork()
|
||||||
|
{
|
||||||
|
setCurrentThreadId();
|
||||||
|
}
|
||||||
|
@ -3,3 +3,5 @@
|
|||||||
|
|
||||||
/// Obtain thread id from OS. The value is cached in thread local variable.
|
/// Obtain thread id from OS. The value is cached in thread local variable.
|
||||||
uint64_t getThreadId();
|
uint64_t getThreadId();
|
||||||
|
|
||||||
|
void updateCurrentThreadIdAfterFork();
|
||||||
|
301
base/base/hex.h
Normal file
301
base/base/hex.h
Normal file
@ -0,0 +1,301 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <bit>
|
||||||
|
#include <cstring>
|
||||||
|
#include "types.h"
|
||||||
|
|
||||||
|
namespace CityHash_v1_0_2 { struct uint128; }
|
||||||
|
|
||||||
|
namespace wide
|
||||||
|
{
|
||||||
|
template <size_t Bits, typename Signed>
|
||||||
|
class integer;
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace impl
|
||||||
|
{
|
||||||
|
/// Maps 0..15 to 0..9A..F or 0..9a..f correspondingly.
|
||||||
|
constexpr inline std::string_view hex_digit_to_char_uppercase_table = "0123456789ABCDEF";
|
||||||
|
constexpr inline std::string_view hex_digit_to_char_lowercase_table = "0123456789abcdef";
|
||||||
|
|
||||||
|
/// Maps 0..255 to 00..FF or 00..ff correspondingly.
|
||||||
|
constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
|
||||||
|
"000102030405060708090A0B0C0D0E0F"
|
||||||
|
"101112131415161718191A1B1C1D1E1F"
|
||||||
|
"202122232425262728292A2B2C2D2E2F"
|
||||||
|
"303132333435363738393A3B3C3D3E3F"
|
||||||
|
"404142434445464748494A4B4C4D4E4F"
|
||||||
|
"505152535455565758595A5B5C5D5E5F"
|
||||||
|
"606162636465666768696A6B6C6D6E6F"
|
||||||
|
"707172737475767778797A7B7C7D7E7F"
|
||||||
|
"808182838485868788898A8B8C8D8E8F"
|
||||||
|
"909192939495969798999A9B9C9D9E9F"
|
||||||
|
"A0A1A2A3A4A5A6A7A8A9AAABACADAEAF"
|
||||||
|
"B0B1B2B3B4B5B6B7B8B9BABBBCBDBEBF"
|
||||||
|
"C0C1C2C3C4C5C6C7C8C9CACBCCCDCECF"
|
||||||
|
"D0D1D2D3D4D5D6D7D8D9DADBDCDDDEDF"
|
||||||
|
"E0E1E2E3E4E5E6E7E8E9EAEBECEDEEEF"
|
||||||
|
"F0F1F2F3F4F5F6F7F8F9FAFBFCFDFEFF";
|
||||||
|
|
||||||
|
constexpr inline std::string_view hex_byte_to_char_lowercase_table = //
|
||||||
|
"000102030405060708090a0b0c0d0e0f"
|
||||||
|
"101112131415161718191a1b1c1d1e1f"
|
||||||
|
"202122232425262728292a2b2c2d2e2f"
|
||||||
|
"303132333435363738393a3b3c3d3e3f"
|
||||||
|
"404142434445464748494a4b4c4d4e4f"
|
||||||
|
"505152535455565758595a5b5c5d5e5f"
|
||||||
|
"606162636465666768696a6b6c6d6e6f"
|
||||||
|
"707172737475767778797a7b7c7d7e7f"
|
||||||
|
"808182838485868788898a8b8c8d8e8f"
|
||||||
|
"909192939495969798999a9b9c9d9e9f"
|
||||||
|
"a0a1a2a3a4a5a6a7a8a9aaabacadaeaf"
|
||||||
|
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebf"
|
||||||
|
"c0c1c2c3c4c5c6c7c8c9cacbcccdcecf"
|
||||||
|
"d0d1d2d3d4d5d6d7d8d9dadbdcdddedf"
|
||||||
|
"e0e1e2e3e4e5e6e7e8e9eaebecedeeef"
|
||||||
|
"f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";
|
||||||
|
|
||||||
|
/// Maps 0..255 to 00000000..11111111 correspondingly.
|
||||||
|
constexpr inline std::string_view bin_byte_to_char_table = //
|
||||||
|
"0000000000000001000000100000001100000100000001010000011000000111"
|
||||||
|
"0000100000001001000010100000101100001100000011010000111000001111"
|
||||||
|
"0001000000010001000100100001001100010100000101010001011000010111"
|
||||||
|
"0001100000011001000110100001101100011100000111010001111000011111"
|
||||||
|
"0010000000100001001000100010001100100100001001010010011000100111"
|
||||||
|
"0010100000101001001010100010101100101100001011010010111000101111"
|
||||||
|
"0011000000110001001100100011001100110100001101010011011000110111"
|
||||||
|
"0011100000111001001110100011101100111100001111010011111000111111"
|
||||||
|
"0100000001000001010000100100001101000100010001010100011001000111"
|
||||||
|
"0100100001001001010010100100101101001100010011010100111001001111"
|
||||||
|
"0101000001010001010100100101001101010100010101010101011001010111"
|
||||||
|
"0101100001011001010110100101101101011100010111010101111001011111"
|
||||||
|
"0110000001100001011000100110001101100100011001010110011001100111"
|
||||||
|
"0110100001101001011010100110101101101100011011010110111001101111"
|
||||||
|
"0111000001110001011100100111001101110100011101010111011001110111"
|
||||||
|
"0111100001111001011110100111101101111100011111010111111001111111"
|
||||||
|
"1000000010000001100000101000001110000100100001011000011010000111"
|
||||||
|
"1000100010001001100010101000101110001100100011011000111010001111"
|
||||||
|
"1001000010010001100100101001001110010100100101011001011010010111"
|
||||||
|
"1001100010011001100110101001101110011100100111011001111010011111"
|
||||||
|
"1010000010100001101000101010001110100100101001011010011010100111"
|
||||||
|
"1010100010101001101010101010101110101100101011011010111010101111"
|
||||||
|
"1011000010110001101100101011001110110100101101011011011010110111"
|
||||||
|
"1011100010111001101110101011101110111100101111011011111010111111"
|
||||||
|
"1100000011000001110000101100001111000100110001011100011011000111"
|
||||||
|
"1100100011001001110010101100101111001100110011011100111011001111"
|
||||||
|
"1101000011010001110100101101001111010100110101011101011011010111"
|
||||||
|
"1101100011011001110110101101101111011100110111011101111011011111"
|
||||||
|
"1110000011100001111000101110001111100100111001011110011011100111"
|
||||||
|
"1110100011101001111010101110101111101100111011011110111011101111"
|
||||||
|
"1111000011110001111100101111001111110100111101011111011011110111"
|
||||||
|
"1111100011111001111110101111101111111100111111011111111011111111";
|
||||||
|
|
||||||
|
/// Maps 0..9, A..F, a..f to 0..15. Other chars are mapped to implementation specific value.
constexpr inline std::string_view hex_char_to_digit_table
    = {"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\xff\xff\xff\xff\xff\xff" //0-9
       "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //A-Z
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\x0a\x0b\x0c\x0d\x0e\x0f\xff\xff\xff\xff\xff\xff\xff\xff\xff" //a-z
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
       "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
       256};

/// Converts a hex digit '0'..'f' or '0'..'F' to its value 0..15.
constexpr UInt8 unhexDigit(char c)
{
    return hex_char_to_digit_table[static_cast<UInt8>(c)];
}

/// Converts an unsigned integer in the native endian to hexadecimal representation and back. Used as a base class for HexConversion<T>.
template <typename TUInt, typename = void>
struct HexConversionUInt
{
    static const constexpr size_t num_hex_digits = sizeof(TUInt) * 2;

    static void hex(TUInt uint_, char * out, std::string_view table)
    {
        union
        {
            TUInt value;
            UInt8 uint8[sizeof(TUInt)];
        };

        value = uint_;

        for (size_t i = 0; i < sizeof(TUInt); ++i)
        {
            if constexpr (std::endian::native == std::endian::little)
                memcpy(out + i * 2, &table[static_cast<size_t>(uint8[sizeof(TUInt) - 1 - i]) * 2], 2);
            else
                memcpy(out + i * 2, &table[static_cast<size_t>(uint8[i]) * 2], 2);
        }
    }

    static TUInt unhex(const char * data)
    {
        TUInt res;
        if constexpr (sizeof(TUInt) == 1)
        {
            res = static_cast<UInt8>(unhexDigit(data[0])) * 0x10 + static_cast<UInt8>(unhexDigit(data[1]));
        }
        else if constexpr (sizeof(TUInt) == 2)
        {
            res = static_cast<UInt16>(unhexDigit(data[0])) * 0x1000 + static_cast<UInt16>(unhexDigit(data[1])) * 0x100
                + static_cast<UInt16>(unhexDigit(data[2])) * 0x10 + static_cast<UInt16>(unhexDigit(data[3]));
        }
        else if constexpr ((sizeof(TUInt) <= 8) || ((sizeof(TUInt) % 8) != 0))
        {
            res = 0;
            for (size_t i = 0; i < sizeof(TUInt) * 2; ++i, ++data)
            {
                res <<= 4;
                res += unhexDigit(*data);
            }
        }
        else
        {
            res = 0;
            for (size_t i = 0; i < sizeof(TUInt) / 8; ++i, data += 16)
            {
                res <<= 64;
                res += HexConversionUInt<UInt64>::unhex(data);
            }
        }
        return res;
    }
};

/// Helper template class to convert a value of any supported type to hexadecimal representation and back.
template <typename T, typename SFINAE = void>
struct HexConversion;

template <typename TUInt>
struct HexConversion<TUInt, std::enable_if_t<std::is_integral_v<TUInt>>> : public HexConversionUInt<TUInt> {};

template <size_t Bits, typename Signed>
struct HexConversion<wide::integer<Bits, Signed>> : public HexConversionUInt<wide::integer<Bits, Signed>> {};

template <typename CityHashUInt128> /// Partial specialization here allows not to include <city.h> in this header.
struct HexConversion<CityHashUInt128, std::enable_if_t<std::is_same_v<CityHashUInt128, typename CityHash_v1_0_2::uint128>>>
{
    static const constexpr size_t num_hex_digits = 32;

    static void hex(const CityHashUInt128 & uint_, char * out, std::string_view table)
    {
        HexConversion<UInt64>::hex(uint_.high64, out, table);
        HexConversion<UInt64>::hex(uint_.low64, out + 16, table);
    }

    static CityHashUInt128 unhex(const char * data)
    {
        CityHashUInt128 res;
        res.high64 = HexConversion<UInt64>::unhex(data);
        res.low64 = HexConversion<UInt64>::unhex(data + 16);
        return res;
    }
};

}

/// Produces a hexadecimal representation of an integer value with leading zeros (for checksums).
/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
/// It can be used with signed types as well, however they are written as corresponding unsigned numbers
/// using two's complement (i.e. for example "-1" is written as "0xFF", not as "-0x01").
template <typename T>
void writeHexUIntUppercase(const T & value, char * out)
{
    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_uppercase_table);
}

template <typename T>
void writeHexUIntLowercase(const T & value, char * out)
{
    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_lowercase_table);
}

template <typename T>
std::string getHexUIntUppercase(const T & value)
{
    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
    writeHexUIntUppercase(value, res.data());
    return res;
}

template <typename T>
std::string getHexUIntLowercase(const T & value)
{
    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
    writeHexUIntLowercase(value, res.data());
    return res;
}

constexpr char hexDigitUppercase(unsigned char c)
{
    return impl::hex_digit_to_char_uppercase_table[c];
}

constexpr char hexDigitLowercase(unsigned char c)
{
    return impl::hex_digit_to_char_lowercase_table[c];
}

inline void writeHexByteUppercase(UInt8 byte, void * out)
{
    memcpy(out, &impl::hex_byte_to_char_uppercase_table[static_cast<size_t>(byte) * 2], 2);
}

inline void writeHexByteLowercase(UInt8 byte, void * out)
{
    memcpy(out, &impl::hex_byte_to_char_lowercase_table[static_cast<size_t>(byte) * 2], 2);
}

/// Converts a hex representation with leading zeros back to an integer value.
/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
template <typename T>
constexpr T unhexUInt(const char * data)
{
    return impl::HexConversion<T>::unhex(data);
}

/// Converts a hexadecimal digit '0'..'f' or '0'..'F' to UInt8.
constexpr UInt8 unhex(char c)
{
    return impl::unhexDigit(c);
}

/// Converts two hexadecimal digits to UInt8.
constexpr UInt8 unhex2(const char * data)
{
    return unhexUInt<UInt8>(data);
}

/// Converts four hexadecimal digits to UInt16.
constexpr UInt16 unhex4(const char * data)
{
    return unhexUInt<UInt16>(data);
}

/// Produces a binary representation of a single byte.
inline void writeBinByte(UInt8 byte, void * out)
{
    memcpy(out, &impl::bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
}

/// Converts byte array to a hex string. Useful for debug logging.
inline std::string hexString(const void * data, size_t size)
{
    const char * p = reinterpret_cast<const char *>(data);
    std::string s(size * 2, '\0');
    for (size_t i = 0; i < size; ++i)
        writeHexByteLowercase(p[i], s.data() + i * 2);
    return s;
}
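Usage sketch (not part of the commit), assuming the hex helpers above are available from their header; the include path below is an assumption:

    #include <cassert>
    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <base/hex.h>   // assumed location of the header shown above

    int main()
    {
        uint32_t value = 0xDEADBEEF;
        std::string hex = getHexUIntUppercase(value);      // "DEADBEEF" (8 digits, zero-padded)
        assert(unhexUInt<uint32_t>(hex.data()) == value);  // round trip back to the integer
        std::cout << hex << " " << hexString("AB", 2) << '\n';   // prints: DEADBEEF 4142
    }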
18  base/base/interpolate.h  Normal file
@@ -0,0 +1,18 @@
#pragma once

#include <cassert>
#include <cmath>

/** Linear interpolation in logarithmic coordinates.
  * Exponential interpolation is related to linear interpolation
  * exactly in same way as geometric mean is related to arithmetic mean.
  */
constexpr double interpolateExponential(double min, double max, double ratio)
{
    assert(min > 0 && ratio >= 0 && ratio <= 1);
    return min * std::pow(max / min, ratio);
}

constexpr double interpolateLinear(double min, double max, double ratio)
{
    return std::lerp(min, max, ratio);
}
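Quick illustration (not part of the commit): at ratio 0.5 the exponential variant lands on the geometric mean, the linear one on the arithmetic mean.

    #include <cassert>
    #include <cmath>
    // Assumes base/base/interpolate.h from above is included.

    int main()
    {
        assert(std::fabs(interpolateExponential(1.0, 100.0, 0.5) - 10.0) < 1e-9);   // geometric midpoint
        assert(std::fabs(interpolateLinear(1.0, 100.0, 0.5) - 50.5) < 1e-9);        // arithmetic midpoint
    }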
@ -20,14 +20,14 @@ Out & dumpValue(Out &, T &&);
|
|||||||
|
|
||||||
/// Catch-all case.
|
/// Catch-all case.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == -1, Out> & dumpImpl(Out & out, T &&)
|
std::enable_if_t<priority == -1, Out> & dumpImpl(Out & out, T &&) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << "{...}";
|
return out << "{...}";
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An object, that could be output with operator <<.
|
/// An object, that could be output with operator <<.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 0, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr)
|
std::enable_if_t<priority == 0, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << x;
|
return out << x;
|
||||||
}
|
}
|
||||||
@ -37,7 +37,7 @@ template <int priority, typename Out, typename T>
|
|||||||
std::enable_if_t<priority == 1
|
std::enable_if_t<priority == 1
|
||||||
/// Protect from the case when operator * do effectively nothing (function pointer).
|
/// Protect from the case when operator * do effectively nothing (function pointer).
|
||||||
&& !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>
|
&& !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>
|
||||||
, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr)
|
, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
if (!x)
|
if (!x)
|
||||||
return out << "nullptr";
|
return out << "nullptr";
|
||||||
@ -46,7 +46,7 @@ std::enable_if_t<priority == 1
|
|||||||
|
|
||||||
/// Container.
|
/// Container.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr)
|
std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
bool first = true;
|
bool first = true;
|
||||||
out << "{";
|
out << "{";
|
||||||
@ -64,7 +64,7 @@ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<
|
|||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
|
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
|
||||||
dumpImpl(Out & out, T && x)
|
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << magic_enum::enum_name(x);
|
return out << magic_enum::enum_name(x);
|
||||||
}
|
}
|
||||||
@ -73,7 +73,7 @@ dumpImpl(Out & out, T && x)
|
|||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>), Out> &
|
std::enable_if_t<priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>), Out> &
|
||||||
dumpImpl(Out & out, T && x)
|
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << std::quoted(x);
|
return out << std::quoted(x);
|
||||||
}
|
}
|
||||||
@ -82,7 +82,7 @@ dumpImpl(Out & out, T && x)
|
|||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>, Out> &
|
std::enable_if_t<priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>, Out> &
|
||||||
dumpImpl(Out & out, T && x)
|
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << int(x);
|
return out << int(x);
|
||||||
}
|
}
|
||||||
@ -90,7 +90,7 @@ dumpImpl(Out & out, T && x)
|
|||||||
|
|
||||||
/// Tuple, pair
|
/// Tuple, pair
|
||||||
template <size_t N, typename Out, typename T>
|
template <size_t N, typename Out, typename T>
|
||||||
Out & dumpTupleImpl(Out & out, T && x)
|
Out & dumpTupleImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
if constexpr (N == 0)
|
if constexpr (N == 0)
|
||||||
out << "{";
|
out << "{";
|
||||||
@ -108,36 +108,37 @@ Out & dumpTupleImpl(Out & out, T && x)
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 4, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr)
|
std::enable_if_t<priority == 4, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return dumpTupleImpl<0>(out, x);
|
return dumpTupleImpl<0>(out, x);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<priority>(std::declval<Out &>(), std::declval<T>()))> *)
|
Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<priority>(std::declval<Out &>(), std::declval<T>()))> *) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return dumpImpl<priority>(out, x);
|
return dumpImpl<priority>(out, x);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NOLINTNEXTLINE(google-explicit-constructor)
|
||||||
struct LowPriority { LowPriority(void *) {} };
|
struct LowPriority { LowPriority(void *) {} };
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
Out & dumpDispatchPriorities(Out & out, T && x, LowPriority)
|
Out & dumpDispatchPriorities(Out & out, T && x, LowPriority) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return dumpDispatchPriorities<priority - 1>(out, x, nullptr);
|
return dumpDispatchPriorities<priority - 1>(out, x, nullptr);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
template <typename Out, typename T>
|
template <typename Out, typename T>
|
||||||
Out & dumpValue(Out & out, T && x)
|
Out & dumpValue(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return dumpDispatchPriorities<5>(out, x, nullptr);
|
return dumpDispatchPriorities<5>(out, x, nullptr);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
template <typename Out, typename T>
|
template <typename Out, typename T>
|
||||||
Out & dump(Out & out, const char * name, T && x)
|
Out & dump(Out & out, const char * name, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
// Dumping string literal, printing name and demangled type is irrelevant.
|
// Dumping string literal, printing name and demangled type is irrelevant.
|
||||||
if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
|
if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
|
||||||
|
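For orientation (a sketch, not part of the commit): the priority-dispatch chain above ends in dumpValue() and dump(), which can be used roughly like this once their header is included:

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main()
    {
        std::map<std::string, std::vector<int>> m{{"a", {1, 2}}, {"b", {3}}};
        dumpValue(std::cerr, m) << '\n';   // containers, tuples, strings, enums go to the matching dumpImpl overload
        dump(std::cerr, "m", m) << '\n';   // dump() additionally prints the given name (see the header above)
    }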
@@ -33,6 +33,41 @@
#include <base/extended_types.h>


+template <typename T>
+inline int digits10(T x)
+{
+    if (x < 10ULL)
+        return 1;
+    if (x < 100ULL)
+        return 2;
+    if (x < 1000ULL)
+        return 3;
+
+    if (x < 1000000000000ULL)
+    {
+        if (x < 100000000ULL)
+        {
+            if (x < 1000000ULL)
+            {
+                if (x < 10000ULL)
+                    return 4;
+                else
+                    return 5 + (x >= 100000ULL);
+            }
+
+            return 7 + (x >= 10000000ULL);
+        }
+
+        if (x < 10000000000ULL)
+            return 9 + (x >= 1000000000ULL);
+
+        return 11 + (x >= 100000000000ULL);
+    }
+
+    return 12 + digits10(x / 1000000000000ULL);
+}
+
+
namespace impl
{

@@ -91,10 +126,10 @@
using DivisionBy10PowN = typename SelectType
<
    N,
-    Division<uint8_t, 0, 205U, 11>, /// divide by 10
-    Division<uint16_t, 1, 41943U, 22>, /// divide by 100
-    Division<uint32_t, 0, 3518437209U, 45>, /// divide by 10000
-    Division<uint64_t, 0, 12379400392853802749ULL, 90> /// divide by 100000000
+    Division<uint8_t, false, 205U, 11>, /// divide by 10
+    Division<uint16_t, true, 41943U, 22>, /// divide by 100
+    Division<uint32_t, false, 3518437209U, 45>, /// divide by 10000
+    Division<uint64_t, false, 12379400392853802749ULL, 90> /// divide by 100000000
>::Result;

template <size_t N>
@@ -122,7 +157,7 @@ QuotientAndRemainder<N> static inline split(UnsignedOfSize<N> value)
    constexpr DivisionBy10PowN<N> division;

    UnsignedOfSize<N> quotient = (division.multiplier * (UnsignedOfSize<2 * N>(value) + division.add)) >> division.shift;
-    UnsignedOfSize<N / 2> remainder = value - quotient * pow10<UnsignedOfSize<N / 2>>(N);
+    UnsignedOfSize<N / 2> remainder = static_cast<UnsignedOfSize<N / 2>>(value - quotient * pow10<UnsignedOfSize<N / 2>>(N));

    return {quotient, remainder};
}
@@ -312,39 +347,6 @@ namespace convert
}
}

-template <typename T>
-static inline int digits10(T x)
-{
-    if (x < 10ULL)
-        return 1;
-    if (x < 100ULL)
-        return 2;
-    if (x < 1000ULL)
-        return 3;
-
-    if (x < 1000000000000ULL)
-    {
-        if (x < 100000000ULL)
-        {
-            if (x < 1000000ULL)
-            {
-                if (x < 10000ULL)
-                    return 4;
-                else
-                    return 5 + (x >= 100000ULL);
-            }
-
-            return 7 + (x >= 10000000ULL);
-        }
-
-        if (x < 10000000000ULL)
-            return 9 + (x >= 1000000000ULL);
-
-        return 11 + (x >= 100000000000ULL);
-    }
-
-    return 12 + digits10(x / 1000000000000ULL);
-}
-
template <typename T>
static inline char * writeUIntText(T x, char * p)
@@ -352,7 +354,7 @@ static inline char * writeUIntText(T x, char * p)
    static_assert(is_unsigned_v<T>);

    int len = digits10(x);
-    auto pp = p + len;
+    auto * pp = p + len;
    while (x >= 100)
    {
        const auto i = x % 100;
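A reference check for the digits10() helper added above (sketch, not part of the commit); the loop below is an independent, hypothetical implementation of the same contract:

    #include <cassert>
    #include <cstdint>

    int digits10_reference(uint64_t x)   // hypothetical helper, only for comparison
    {
        int n = 1;
        while (x >= 10) { x /= 10; ++n; }
        return n;
    }

    int main()
    {
        assert(digits10_reference(9) == 1);
        assert(digits10_reference(12345) == 5);
        assert(digits10_reference(1000000000000ULL) == 13);
    }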
@@ -1,53 +0,0 @@
#pragma once

/// Macros for convenient usage of Poco logger.

#include <fmt/format.h>
#include <Poco/Logger.h>
#include <Poco/Message.h>
#include <Common/CurrentThread.h>

namespace
{
    template <typename... Ts> constexpr size_t numArgs(Ts &&...) { return sizeof...(Ts); }
    template <typename T, typename... Ts> constexpr auto firstArg(T && x, Ts &&...) { return std::forward<T>(x); }
    /// For implicit conversion of fmt::basic_runtime<> to char* for std::string ctor
    template <typename T, typename... Ts> constexpr auto firstArg(fmt::basic_runtime<T> && data, Ts &&...) { return data.str.data(); }
}

/// Logs a message to a specified logger with that level.
/// If more than one argument is provided,
/// the first argument is interpreted as template with {}-substitutions
/// and the latter arguments treat as values to substitute.
/// If only one argument is provided, it is threat as message without substitutions.

#define LOG_IMPL(logger, priority, PRIORITY, ...) do \
{ \
    const bool is_clients_log = (DB::CurrentThread::getGroup() != nullptr) && \
        (DB::CurrentThread::getGroup()->client_logs_level >= (priority)); \
    if ((logger)->is((PRIORITY)) || is_clients_log) \
    { \
        std::string formatted_message = numArgs(__VA_ARGS__) > 1 ? fmt::format(__VA_ARGS__) : firstArg(__VA_ARGS__); \
        if (auto channel = (logger)->getChannel()) \
        { \
            std::string file_function; \
            file_function += __FILE__; \
            file_function += "; "; \
            file_function += __PRETTY_FUNCTION__; \
            Poco::Message poco_message((logger)->name(), formatted_message, \
                (PRIORITY), file_function.c_str(), __LINE__); \
            channel->log(poco_message); \
        } \
    } \
} while (false)

#define LOG_TEST(logger, ...) LOG_IMPL(logger, DB::LogsLevel::test, Poco::Message::PRIO_TEST, __VA_ARGS__)
#define LOG_TRACE(logger, ...) LOG_IMPL(logger, DB::LogsLevel::trace, Poco::Message::PRIO_TRACE, __VA_ARGS__)
#define LOG_DEBUG(logger, ...) LOG_IMPL(logger, DB::LogsLevel::debug, Poco::Message::PRIO_DEBUG, __VA_ARGS__)
#define LOG_INFO(logger, ...) LOG_IMPL(logger, DB::LogsLevel::information, Poco::Message::PRIO_INFORMATION, __VA_ARGS__)
#define LOG_WARNING(logger, ...) LOG_IMPL(logger, DB::LogsLevel::warning, Poco::Message::PRIO_WARNING, __VA_ARGS__)
#define LOG_ERROR(logger, ...) LOG_IMPL(logger, DB::LogsLevel::error, Poco::Message::PRIO_ERROR, __VA_ARGS__)
#define LOG_FATAL(logger, ...) LOG_IMPL(logger, DB::LogsLevel::error, Poco::Message::PRIO_FATAL, __VA_ARGS__)
9  base/base/move_extend.h  Normal file
@@ -0,0 +1,9 @@
#pragma once

/// Extend @p to by moving elements from @p from to @p to end
/// @return @p to iterator to first of moved elements.
template <class To, class From>
typename To::iterator moveExtend(To & to, From && from)
{
    return to.insert(to.end(), std::make_move_iterator(from.begin()), std::make_move_iterator(from.end()));
}
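Usage sketch (not part of the commit), assuming base/base/move_extend.h above is included:

    #include <cassert>
    #include <string>
    #include <utility>
    #include <vector>

    int main()
    {
        std::vector<std::string> to{"a"};
        std::vector<std::string> from{"b", "c"};
        auto it = moveExtend(to, std::move(from));   // returns iterator to the first moved-in element
        assert(to.size() == 3 && *it == "b");
    }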
@@ -3,7 +3,7 @@
#include <cstddef>
#include <cstdlib>
#include <cstring>
-#include <errno.h>
+#include <cerrno>


void * mremap_fallback(
@@ -1,12 +1,10 @@
-#ifdef HAS_RESERVED_IDENTIFIER
#pragma clang diagnostic ignored "-Wreserved-identifier"
-#endif

/// This code was based on the code by Fedor Korotkiy https://www.linkedin.com/in/fedor-korotkiy-659a1838/

#include <base/defines.h>

-#if defined(__linux__) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
+#if defined(OS_LINUX) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
#define USE_PHDR_CACHE 1
#endif

@@ -8,6 +8,7 @@
 * As a drawback, this only works if no dynamic object unloading happens after this point.
 * This function is thread-safe. You should call it to update cache after loading new shared libraries.
 * Otherwise exception handling from dlopened libraries won't work (will call std::terminate immediately).
+ * NOTE: dlopen is forbidden in our code.
 *
 * NOTE: It is disabled with Thread Sanitizer because TSan can only use original "dl_iterate_phdr" function.
 */
@@ -1,8 +1,8 @@
+/** Origin: musl-libc /src/math/exp10.c
+ * https://www.musl-libc.org/
+ * http://git.musl-libc.org/cgit/musl/tree/src/math/exp10.c
+ */
/*
-https://www.musl-libc.org/
-http://git.musl-libc.org/cgit/musl/tree/src/math/exp10.c
-
musl as a whole is licensed under the following standard MIT license:

----------------------------------------------------------------------
@@ -27,151 +27,11 @@ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
----------------------------------------------------------------------

-Authors/contributors include:
-
-Alex Dowad, Alexander Monakov, Anthony G. Basile, Arvid Picciani, Bobby Bingham, Boris Brezillon, Brent Cook, Chris Spiegel, Clément Vasseur, Daniel Micay,
-Denys Vlasenko, Emil Renner Berthing, Felix Fietkau, Felix Janda, Gianluca Anzolin, Hauke Mehrtens, Hiltjo Posthuma, Isaac Dunham, Jaydeep Patil, Jens Gustedt,
-Jeremy Huntwork, Jo-Philipp Wich, Joakim Sindholt, John Spencer, Josiah Worcester, Justin Cormack, Khem Raj, Kylie McClain, Luca Barbato, Luka Perkov,
-M Farkas-Dyck (Strake), Mahesh Bodapati, Michael Forney, Natanael Copa, Nicholas J. Kain, orc, Pascal Cuoq, Petr Hosek, Pierre Carrier, Rich Felker,
-Richard Pennington, Shiz, sin, Solar Designer, Stefan Kristiansson, Szabolcs Nagy, Timo Teräs, Trutz Behn, Valentin Ochs, William Haddon
-
-Portions of this software are derived from third-party works licensed under terms compatible with the above MIT license:
-
-The TRE regular expression implementation (src/regex/reg* and src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed under a 2-clause BSD license (license text in the source files). The included version has been heavily modified by Rich Felker in 2012, in the interests of size, simplicity, and namespace cleanliness.
-
-Much of the math library code (src/math/ * and src/complex/ *) is Copyright © 1993,2004 Sun Microsystems or Copyright © 2003-2011 David Schultz or Copyright © 2003-2009 Steven G. Kargl or Copyright © 2003-2009 Bruce D. Evans or Copyright © 2008 Stephen L. Moshier and labelled as such in comments in the individual source files. All have been licensed under extremely permissive terms.
-
-The ARM memcpy code (src/string/arm/memcpy_el.S) is Copyright © 2008 The Android Open Source Project and is licensed under a two-clause BSD license. It was taken from Bionic libc, used on Android.
-
-The implementation of DES for crypt (src/crypt/crypt_des.c) is Copyright © 1994 David Burren. It is licensed under a BSD license.
-
-The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was originally written by Solar Designer and placed into the public domain. The code also comes with a fallback permissive license for use in jurisdictions that may not recognize the public domain.
-
-The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011 Valentin Ochs and is licensed under an MIT-style license.
-
-The BSD PRNG implementation (src/prng/random.c) and XSI search API (src/search/ *.c) functions are Copyright © 2011 Szabolcs Nagy and licensed under following terms: "Permission to use, copy, modify, and/or distribute this code for any purpose with or without fee is hereby granted. There is no warranty."
-
-The x86_64 port was written by Nicholas J. Kain and is licensed under the standard MIT terms.
-
-The mips and microblaze ports were originally written by Richard Pennington for use in the ellcc project. The original code was adapted by Rich Felker for build system and code conventions during upstream integration. It is licensed under the standard MIT terms.
-
-The mips64 port was contributed by Imagination Technologies and is licensed under the standard MIT terms.
-
-The powerpc port was also originally written by Richard Pennington, and later supplemented and integrated by John Spencer. It is licensed under the standard MIT terms.
-
-All other files which have no copyright comments are original works produced specifically for use as part of this library, written either by Rich Felker, the main author of the library, or by one or more contibutors listed above. Details on authorship of individual files can be found in the git version control history of the project. The omission of copyright and license comments in each file is in the interest of source tree size.
-
-In addition, permission is hereby granted for all public header files (include/ * and arch/ * /bits/ *) and crt files intended to be linked into applications (crt/ *, ldso/dlstart.c, and arch/ * /crt_arch.h) to omit the copyright notice and permission notice otherwise required by the license, and to use these files without any requirement of attribution. These files include substantial contributions from: Bobby Bingham, John Spencer, Nicholas J. Kain, Rich Felker, Richard Pennington, Stefan Kristiansson, Szabolcs Nagy
-
-all of whom have explicitly granted such permission.
-
-This file previously contained text expressing a belief that most of the files covered by the above exception were sufficiently trivial not to be subject to copyright, resulting in confusion over whether it negated the permissions granted in the license. In the spirit of permissive licensing, and of not having licensing issues being an obstacle to adoption, that text has been removed.
-
*/

-#include <math.h>
-#include <stdint.h>
-#include <stdio.h>
+#include <cmath>
+#include <cstdint>
+#include <cstdio>

double preciseExp10(double x)
{
@@ -3,9 +3,9 @@
/** exp10 from GNU libm fails to give precise result for integer arguments.
  * For example, exp10(3) gives 1000.0000000000001
  * despite the fact that 1000 is exactly representable in double and float.
- * Better to always use implementation from MUSL.
+ * Better to always use our own implementation based on a MUSL's one.
  *
- * Note: the function names are different to avoid confusion with symbols from the system libm.
+ * Note: the function name is different to avoid confusion with symbols from the system libm.
  */

double preciseExp10(double x);
@@ -1,9 +1,10 @@
#if defined(OS_LINUX)
#   include <sys/syscall.h>
#endif
+#include <cstdlib>
#include <unistd.h>
#include <base/safeExit.h>
-#include <base/defines.h>
+#include <base/defines.h> /// for THREAD_SANITIZER

[[noreturn]] void safeExit(int code)
{
@@ -11,7 +12,7 @@
    /// Thread sanitizer tries to do something on exit that we don't need if we want to exit immediately,
    /// while connection handling threads are still run.
    (void)syscall(SYS_exit_group, code);
-    __builtin_unreachable();
+    UNREACHABLE();
#else
    _exit(code);
#endif
@@ -5,13 +5,13 @@
#include <utility>

template <class F>
-class [[nodiscard]] basic_scope_guard
+class [[nodiscard]] BasicScopeGuard
{
public:
-    constexpr basic_scope_guard() = default;
-    constexpr basic_scope_guard(basic_scope_guard && src) : function{src.release()} {}
+    constexpr BasicScopeGuard() = default;
+    constexpr BasicScopeGuard(BasicScopeGuard && src) : function{src.release()} {} // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor, cppcoreguidelines-noexcept-move-operations)

-    constexpr basic_scope_guard & operator=(basic_scope_guard && src)
+    constexpr BasicScopeGuard & operator=(BasicScopeGuard && src) // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor, cppcoreguidelines-noexcept-move-operations)
    {
        if (this != &src)
        {
@@ -21,11 +21,13 @@ public:
        return *this;
    }

-    template <typename G, typename = std::enable_if_t<std::is_convertible_v<G, F>, void>>
-    constexpr basic_scope_guard(basic_scope_guard<G> && src) : function{src.release()} {}
+    template <typename G>
+    requires std::is_convertible_v<G, F>
+    constexpr BasicScopeGuard(BasicScopeGuard<G> && src) : function{src.release()} {} // NOLINT(google-explicit-constructor, cppcoreguidelines-rvalue-reference-param-not-moved, cppcoreguidelines-noexcept-move-operations)

-    template <typename G, typename = std::enable_if_t<std::is_convertible_v<G, F>, void>>
-    constexpr basic_scope_guard & operator=(basic_scope_guard<G> && src)
+    template <typename G>
+    requires std::is_convertible_v<G, F>
+    constexpr BasicScopeGuard & operator=(BasicScopeGuard<G> && src) // NOLINT(cppcoreguidelines-rvalue-reference-param-not-moved, cppcoreguidelines-noexcept-move-operations)
    {
        if (this != &src)
        {
@@ -35,13 +37,15 @@ public:
        return *this;
    }

-    template <typename G, typename = std::enable_if_t<std::is_convertible_v<G, F>, void>>
-    constexpr basic_scope_guard(const G & function_) : function{function_} {}
+    template <typename G>
+    requires std::is_convertible_v<G, F>
+    constexpr BasicScopeGuard(const G & function_) : function{function_} {} // NOLINT(google-explicit-constructor)

-    template <typename G, typename = std::enable_if_t<std::is_convertible_v<G, F>, void>>
-    constexpr basic_scope_guard(G && function_) : function{std::move(function_)} {}
+    template <typename G>
+    requires std::is_convertible_v<G, F>
+    constexpr BasicScopeGuard(G && function_) : function{std::move(function_)} {} // NOLINT(google-explicit-constructor, bugprone-forwarding-reference-overload, bugprone-move-forwarding-reference, cppcoreguidelines-missing-std-forward)

-    ~basic_scope_guard() { invoke(); }
+    ~BasicScopeGuard() { invoke(); }

    static constexpr bool is_nullable = std::is_constructible_v<bool, F>;

@@ -64,8 +68,9 @@ public:
        return std::exchange(function, {});
    }

-    template <typename G, typename = std::enable_if_t<std::is_convertible_v<G, F>, void>>
-    basic_scope_guard<F> & join(basic_scope_guard<G> && other)
+    template <typename G>
+    requires std::is_convertible_v<G, F>
+    BasicScopeGuard<F> & join(BasicScopeGuard<G> && other) // NOLINT(cppcoreguidelines-rvalue-reference-param-not-moved)
    {
        if (other.function)
        {
@@ -97,14 +102,13 @@ private:
    F function = F{};
};

-using scope_guard = basic_scope_guard<std::function<void(void)>>;
+using scope_guard = BasicScopeGuard<std::function<void(void)>>;


template <class F>
-inline basic_scope_guard<F> make_scope_guard(F && function_) { return std::forward<F>(function_); }
+inline BasicScopeGuard<F> make_scope_guard(F && function_) { return std::forward<F>(function_); }

#define SCOPE_EXIT_CONCAT(n, ...) \
    const auto scope_exit##n = make_scope_guard([&] { __VA_ARGS__; })
#define SCOPE_EXIT_FWD(n, ...) SCOPE_EXIT_CONCAT(n, __VA_ARGS__)
#define SCOPE_EXIT(...) SCOPE_EXIT_FWD(__LINE__, __VA_ARGS__)
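Usage sketch (not part of the commit): the renamed guard is normally used through the unchanged SCOPE_EXIT macro, assuming the header above is included:

    #include <cstdio>

    int demo()
    {
        std::FILE * f = std::fopen("/tmp/scope_guard_demo", "w");
        if (!f)
            return 1;
        SCOPE_EXIT({ std::fclose(f); });   // runs on every exit path of this scope
        std::fputs("hello\n", f);
        return 0;
    }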
@@ -1,28 +0,0 @@
#include <base/setTerminalEcho.h>
#include <base/errnoToString.h>
#include <stdexcept>
#include <cstring>
#include <string>
#include <termios.h>
#include <unistd.h>

void setTerminalEcho(bool enable)
{
    /// Obtain terminal attributes,
    /// toggle the ECHO flag
    /// and set them back.

    struct termios tty{};

    if (0 != tcgetattr(STDIN_FILENO, &tty))
        throw std::runtime_error(std::string("setTerminalEcho failed get: ") + errnoToString(errno));

    if (enable)
        tty.c_lflag |= ECHO;
    else
        tty.c_lflag &= ~ECHO;

    if (0 != tcsetattr(STDIN_FILENO, TCSANOW, &tty))
        throw std::runtime_error(std::string("setTerminalEcho failed set: ") + errnoToString(errno));
}
@@ -1,4 +0,0 @@
#pragma once

/// Enable or disable echoing of typed characters. Throws std::runtime_error on error.
void setTerminalEcho(bool enable);
@@ -1,36 +0,0 @@
#pragma once

#include <memory>

/** Allows to make std::shared_ptr from T with protected constructor.
  *
  * Derive your T class from shared_ptr_helper<T> and add shared_ptr_helper<T> as a friend
  * and you will have static 'create' method in your class.
  */
template <typename T>
struct shared_ptr_helper
{
    template <typename... TArgs>
    static std::shared_ptr<T> create(TArgs &&... args)
    {
        return std::shared_ptr<T>(new T(std::forward<TArgs>(args)...));
    }
};

template <typename T>
struct is_shared_ptr
{
    static constexpr bool value = false;
};

template <typename T>
struct is_shared_ptr<std::shared_ptr<T>>
{
    static constexpr bool value = true;
};

template <typename T>
inline constexpr bool is_shared_ptr_v = is_shared_ptr<T>::value;
@@ -14,37 +14,37 @@ static T shift10Impl(T x, int exponent)
    static const long double powers10[] =
    {
1e-323L, 1e-322L, 1e-321L, 1e-320L, 1e-319L, 1e-318L, 1e-317L, 1e-316L, 1e-315L, 1e-314L, 1e-313L, 1e-312L, 1e-311L,
|
1e-323L, 1e-322L, 1e-321L, 1e-320L, 1e-319L, 1e-318L, 1e-317L, 1e-316L, 1e-315L, 1e-314L, 1e-313L, 1e-312L, 1e-311L,
|
||||||
1e-310L,1e-309L,1e-308L,1e-307L,1e-306L,1e-305L,1e-304L,1e-303L,1e-302L,1e-301L,1e-300L,1e-299L,1e-298L,1e-297L,1e-296L,1e-295L,1e-294L,1e-293L,1e-292L,1e-291L,
|
1e-310L, 1e-309L, 1e-308L, 1e-307L, 1e-306L, 1e-305L, 1e-304L, 1e-303L, 1e-302L, 1e-301L, 1e-300L, 1e-299L, 1e-298L, 1e-297L, 1e-296L, 1e-295L, 1e-294L, 1e-293L, 1e-292L, 1e-291L,
|
||||||
1e-290L,1e-289L,1e-288L,1e-287L,1e-286L,1e-285L,1e-284L,1e-283L,1e-282L,1e-281L,1e-280L,1e-279L,1e-278L,1e-277L,1e-276L,1e-275L,1e-274L,1e-273L,1e-272L,1e-271L,
|
1e-290L, 1e-289L, 1e-288L, 1e-287L, 1e-286L, 1e-285L, 1e-284L, 1e-283L, 1e-282L, 1e-281L, 1e-280L, 1e-279L, 1e-278L, 1e-277L, 1e-276L, 1e-275L, 1e-274L, 1e-273L, 1e-272L, 1e-271L,
|
||||||
1e-270L,1e-269L,1e-268L,1e-267L,1e-266L,1e-265L,1e-264L,1e-263L,1e-262L,1e-261L,1e-260L,1e-259L,1e-258L,1e-257L,1e-256L,1e-255L,1e-254L,1e-253L,1e-252L,1e-251L,
|
1e-270L, 1e-269L, 1e-268L, 1e-267L, 1e-266L, 1e-265L, 1e-264L, 1e-263L, 1e-262L, 1e-261L, 1e-260L, 1e-259L, 1e-258L, 1e-257L, 1e-256L, 1e-255L, 1e-254L, 1e-253L, 1e-252L, 1e-251L,
|
||||||
1e-250L,1e-249L,1e-248L,1e-247L,1e-246L,1e-245L,1e-244L,1e-243L,1e-242L,1e-241L,1e-240L,1e-239L,1e-238L,1e-237L,1e-236L,1e-235L,1e-234L,1e-233L,1e-232L,1e-231L,
|
1e-250L, 1e-249L, 1e-248L, 1e-247L, 1e-246L, 1e-245L, 1e-244L, 1e-243L, 1e-242L, 1e-241L, 1e-240L, 1e-239L, 1e-238L, 1e-237L, 1e-236L, 1e-235L, 1e-234L, 1e-233L, 1e-232L, 1e-231L,
|
||||||
1e-230L,1e-229L,1e-228L,1e-227L,1e-226L,1e-225L,1e-224L,1e-223L,1e-222L,1e-221L,1e-220L,1e-219L,1e-218L,1e-217L,1e-216L,1e-215L,1e-214L,1e-213L,1e-212L,1e-211L,
|
1e-230L, 1e-229L, 1e-228L, 1e-227L, 1e-226L, 1e-225L, 1e-224L, 1e-223L, 1e-222L, 1e-221L, 1e-220L, 1e-219L, 1e-218L, 1e-217L, 1e-216L, 1e-215L, 1e-214L, 1e-213L, 1e-212L, 1e-211L,
|
||||||
1e-210L,1e-209L,1e-208L,1e-207L,1e-206L,1e-205L,1e-204L,1e-203L,1e-202L,1e-201L,1e-200L,1e-199L,1e-198L,1e-197L,1e-196L,1e-195L,1e-194L,1e-193L,1e-192L,1e-191L,
|
1e-210L, 1e-209L, 1e-208L, 1e-207L, 1e-206L, 1e-205L, 1e-204L, 1e-203L, 1e-202L, 1e-201L, 1e-200L, 1e-199L, 1e-198L, 1e-197L, 1e-196L, 1e-195L, 1e-194L, 1e-193L, 1e-192L, 1e-191L,
|
||||||
1e-190L,1e-189L,1e-188L,1e-187L,1e-186L,1e-185L,1e-184L,1e-183L,1e-182L,1e-181L,1e-180L,1e-179L,1e-178L,1e-177L,1e-176L,1e-175L,1e-174L,1e-173L,1e-172L,1e-171L,
|
1e-190L, 1e-189L, 1e-188L, 1e-187L, 1e-186L, 1e-185L, 1e-184L, 1e-183L, 1e-182L, 1e-181L, 1e-180L, 1e-179L, 1e-178L, 1e-177L, 1e-176L, 1e-175L, 1e-174L, 1e-173L, 1e-172L, 1e-171L,
|
||||||
1e-170L,1e-169L,1e-168L,1e-167L,1e-166L,1e-165L,1e-164L,1e-163L,1e-162L,1e-161L,1e-160L,1e-159L,1e-158L,1e-157L,1e-156L,1e-155L,1e-154L,1e-153L,1e-152L,1e-151L,
|
1e-170L, 1e-169L, 1e-168L, 1e-167L, 1e-166L, 1e-165L, 1e-164L, 1e-163L, 1e-162L, 1e-161L, 1e-160L, 1e-159L, 1e-158L, 1e-157L, 1e-156L, 1e-155L, 1e-154L, 1e-153L, 1e-152L, 1e-151L,
|
||||||
1e-150L,1e-149L,1e-148L,1e-147L,1e-146L,1e-145L,1e-144L,1e-143L,1e-142L,1e-141L,1e-140L,1e-139L,1e-138L,1e-137L,1e-136L,1e-135L,1e-134L,1e-133L,1e-132L,1e-131L,
|
1e-150L, 1e-149L, 1e-148L, 1e-147L, 1e-146L, 1e-145L, 1e-144L, 1e-143L, 1e-142L, 1e-141L, 1e-140L, 1e-139L, 1e-138L, 1e-137L, 1e-136L, 1e-135L, 1e-134L, 1e-133L, 1e-132L, 1e-131L,
|
||||||
1e-130L,1e-129L,1e-128L,1e-127L,1e-126L,1e-125L,1e-124L,1e-123L,1e-122L,1e-121L,1e-120L,1e-119L,1e-118L,1e-117L,1e-116L,1e-115L,1e-114L,1e-113L,1e-112L,1e-111L,
|
1e-130L, 1e-129L, 1e-128L, 1e-127L, 1e-126L, 1e-125L, 1e-124L, 1e-123L, 1e-122L, 1e-121L, 1e-120L, 1e-119L, 1e-118L, 1e-117L, 1e-116L, 1e-115L, 1e-114L, 1e-113L, 1e-112L, 1e-111L,
|
||||||
1e-110L,1e-109L,1e-108L,1e-107L,1e-106L,1e-105L,1e-104L,1e-103L,1e-102L,1e-101L,1e-100L,1e-99L,1e-98L,1e-97L,1e-96L,1e-95L,1e-94L,1e-93L,1e-92L,1e-91L,1e-90L,
|
1e-110L, 1e-109L, 1e-108L, 1e-107L, 1e-106L, 1e-105L, 1e-104L, 1e-103L, 1e-102L, 1e-101L, 1e-100L, 1e-99L, 1e-98L, 1e-97L, 1e-96L, 1e-95L, 1e-94L, 1e-93L, 1e-92L, 1e-91L, 1e-90L,
|
||||||
1e-89L,1e-88L,1e-87L,1e-86L,1e-85L,1e-84L,1e-83L,1e-82L,1e-81L,1e-80L,1e-79L,1e-78L,1e-77L,1e-76L,1e-75L,1e-74L,1e-73L,1e-72L,1e-71L,1e-70,
|
1e-89L, 1e-88L, 1e-87L, 1e-86L, 1e-85L, 1e-84L, 1e-83L, 1e-82L, 1e-81L, 1e-80L, 1e-79L, 1e-78L, 1e-77L, 1e-76L, 1e-75L, 1e-74L, 1e-73L, 1e-72L, 1e-71L, 1e-70,
|
||||||
1e-69L,1e-68L,1e-67L,1e-66L,1e-65L,1e-64L,1e-63L,1e-62L,1e-61L,1e-60L,1e-59L,1e-58L,1e-57L,1e-56L,1e-55L,1e-54L,1e-53L,1e-52L,1e-51L,1e-50,
|
1e-69L, 1e-68L, 1e-67L, 1e-66L, 1e-65L, 1e-64L, 1e-63L, 1e-62L, 1e-61L, 1e-60L, 1e-59L, 1e-58L, 1e-57L, 1e-56L, 1e-55L, 1e-54L, 1e-53L, 1e-52L, 1e-51L, 1e-50,
|
||||||
1e-49L,1e-48L,1e-47L,1e-46L,1e-45L,1e-44L,1e-43L,1e-42L,1e-41L,1e-40L,1e-39L,1e-38L,1e-37L,1e-36L,1e-35L,1e-34L,1e-33L,1e-32L,1e-31L,1e-30,
|
1e-49L, 1e-48L, 1e-47L, 1e-46L, 1e-45L, 1e-44L, 1e-43L, 1e-42L, 1e-41L, 1e-40L, 1e-39L, 1e-38L, 1e-37L, 1e-36L, 1e-35L, 1e-34L, 1e-33L, 1e-32L, 1e-31L, 1e-30,
|
||||||
1e-29L,1e-28L,1e-27L,1e-26L,1e-25L,1e-24L,1e-23L,1e-22L,1e-21L,1e-20L,1e-19L,1e-18L,1e-17L,1e-16L,1e-15L,1e-14L,1e-13L,1e-12L,1e-11L,1e-10,
|
1e-29L, 1e-28L, 1e-27L, 1e-26L, 1e-25L, 1e-24L, 1e-23L, 1e-22L, 1e-21L, 1e-20L, 1e-19L, 1e-18L, 1e-17L, 1e-16L, 1e-15L, 1e-14L, 1e-13L, 1e-12L, 1e-11L, 1e-10,
|
||||||
1e-9L,1e-8L,1e-7L,1e-6L,1e-5L,1e-4L,1e-3L,1e-2L,1e-1L,1e0L,1e1L,1e2L,1e3L,1e4L,1e5L,1e6L,1e7L,1e8L,1e9L,1e10,
|
1e-9L, 1e-8L, 1e-7L, 1e-6L, 1e-5L, 1e-4L, 1e-3L, 1e-2L, 1e-1L, 1e0L, 1e1L, 1e2L, 1e3L, 1e4L, 1e5L, 1e6L, 1e7L, 1e8L, 1e9L, 1e10,
|
||||||
1e11L,1e12L,1e13L,1e14L,1e15L,1e16L,1e17L,1e18L,1e19L,1e20L,1e21L,1e22L,1e23L,1e24L,1e25L,1e26L,1e27L,1e28L,1e29L,1e30,
|
1e11L, 1e12L, 1e13L, 1e14L, 1e15L, 1e16L, 1e17L, 1e18L, 1e19L, 1e20L, 1e21L, 1e22L, 1e23L, 1e24L, 1e25L, 1e26L, 1e27L, 1e28L, 1e29L, 1e30,
|
||||||
1e31L,1e32L,1e33L,1e34L,1e35L,1e36L,1e37L,1e38L,1e39L,1e40L,1e41L,1e42L,1e43L,1e44L,1e45L,1e46L,1e47L,1e48L,1e49L,1e50,
|
1e31L, 1e32L, 1e33L, 1e34L, 1e35L, 1e36L, 1e37L, 1e38L, 1e39L, 1e40L, 1e41L, 1e42L, 1e43L, 1e44L, 1e45L, 1e46L, 1e47L, 1e48L, 1e49L, 1e50,
|
||||||
1e51L,1e52L,1e53L,1e54L,1e55L,1e56L,1e57L,1e58L,1e59L,1e60L,1e61L,1e62L,1e63L,1e64L,1e65L,1e66L,1e67L,1e68L,1e69L,1e70,
|
1e51L, 1e52L, 1e53L, 1e54L, 1e55L, 1e56L, 1e57L, 1e58L, 1e59L, 1e60L, 1e61L, 1e62L, 1e63L, 1e64L, 1e65L, 1e66L, 1e67L, 1e68L, 1e69L, 1e70,
|
||||||
1e71L,1e72L,1e73L,1e74L,1e75L,1e76L,1e77L,1e78L,1e79L,1e80L,1e81L,1e82L,1e83L,1e84L,1e85L,1e86L,1e87L,1e88L,1e89L,1e90,
|
1e71L, 1e72L, 1e73L, 1e74L, 1e75L, 1e76L, 1e77L, 1e78L, 1e79L, 1e80L, 1e81L, 1e82L, 1e83L, 1e84L, 1e85L, 1e86L, 1e87L, 1e88L, 1e89L, 1e90,
|
||||||
1e91L,1e92L,1e93L,1e94L,1e95L,1e96L,1e97L,1e98L,1e99L,1e100L,1e101L,1e102L,1e103L,1e104L,1e105L,1e106L,1e107L,1e108L,1e109L,1e110,
|
1e91L, 1e92L, 1e93L, 1e94L, 1e95L, 1e96L, 1e97L, 1e98L, 1e99L, 1e100L, 1e101L, 1e102L, 1e103L, 1e104L, 1e105L, 1e106L, 1e107L, 1e108L, 1e109L, 1e110,
|
||||||
1e111L,1e112L,1e113L,1e114L,1e115L,1e116L,1e117L,1e118L,1e119L,1e120L,1e121L,1e122L,1e123L,1e124L,1e125L,1e126L,1e127L,1e128L,1e129L,1e130,
|
1e111L, 1e112L, 1e113L, 1e114L, 1e115L, 1e116L, 1e117L, 1e118L, 1e119L, 1e120L, 1e121L, 1e122L, 1e123L, 1e124L, 1e125L, 1e126L, 1e127L, 1e128L, 1e129L, 1e130,
|
||||||
1e131L,1e132L,1e133L,1e134L,1e135L,1e136L,1e137L,1e138L,1e139L,1e140L,1e141L,1e142L,1e143L,1e144L,1e145L,1e146L,1e147L,1e148L,1e149L,1e150,
|
1e131L, 1e132L, 1e133L, 1e134L, 1e135L, 1e136L, 1e137L, 1e138L, 1e139L, 1e140L, 1e141L, 1e142L, 1e143L, 1e144L, 1e145L, 1e146L, 1e147L, 1e148L, 1e149L, 1e150,
|
||||||
1e151L,1e152L,1e153L,1e154L,1e155L,1e156L,1e157L,1e158L,1e159L,1e160L,1e161L,1e162L,1e163L,1e164L,1e165L,1e166L,1e167L,1e168L,1e169L,1e170,
|
1e151L, 1e152L, 1e153L, 1e154L, 1e155L, 1e156L, 1e157L, 1e158L, 1e159L, 1e160L, 1e161L, 1e162L, 1e163L, 1e164L, 1e165L, 1e166L, 1e167L, 1e168L, 1e169L, 1e170,
|
||||||
1e171L,1e172L,1e173L,1e174L,1e175L,1e176L,1e177L,1e178L,1e179L,1e180L,1e181L,1e182L,1e183L,1e184L,1e185L,1e186L,1e187L,1e188L,1e189L,1e190,
|
1e171L, 1e172L, 1e173L, 1e174L, 1e175L, 1e176L, 1e177L, 1e178L, 1e179L, 1e180L, 1e181L, 1e182L, 1e183L, 1e184L, 1e185L, 1e186L, 1e187L, 1e188L, 1e189L, 1e190,
|
||||||
1e191L,1e192L,1e193L,1e194L,1e195L,1e196L,1e197L,1e198L,1e199L,1e200L,1e201L,1e202L,1e203L,1e204L,1e205L,1e206L,1e207L,1e208L,1e209L,1e210,
|
1e191L, 1e192L, 1e193L, 1e194L, 1e195L, 1e196L, 1e197L, 1e198L, 1e199L, 1e200L, 1e201L, 1e202L, 1e203L, 1e204L, 1e205L, 1e206L, 1e207L, 1e208L, 1e209L, 1e210,
|
||||||
1e211L,1e212L,1e213L,1e214L,1e215L,1e216L,1e217L,1e218L,1e219L,1e220L,1e221L,1e222L,1e223L,1e224L,1e225L,1e226L,1e227L,1e228L,1e229L,1e230,
|
1e211L, 1e212L, 1e213L, 1e214L, 1e215L, 1e216L, 1e217L, 1e218L, 1e219L, 1e220L, 1e221L, 1e222L, 1e223L, 1e224L, 1e225L, 1e226L, 1e227L, 1e228L, 1e229L, 1e230,
|
||||||
1e231L,1e232L,1e233L,1e234L,1e235L,1e236L,1e237L,1e238L,1e239L,1e240L,1e241L,1e242L,1e243L,1e244L,1e245L,1e246L,1e247L,1e248L,1e249L,1e250,
|
1e231L, 1e232L, 1e233L, 1e234L, 1e235L, 1e236L, 1e237L, 1e238L, 1e239L, 1e240L, 1e241L, 1e242L, 1e243L, 1e244L, 1e245L, 1e246L, 1e247L, 1e248L, 1e249L, 1e250,
|
||||||
1e251L,1e252L,1e253L,1e254L,1e255L,1e256L,1e257L,1e258L,1e259L,1e260L,1e261L,1e262L,1e263L,1e264L,1e265L,1e266L,1e267L,1e268L,1e269L,1e270,
|
1e251L, 1e252L, 1e253L, 1e254L, 1e255L, 1e256L, 1e257L, 1e258L, 1e259L, 1e260L, 1e261L, 1e262L, 1e263L, 1e264L, 1e265L, 1e266L, 1e267L, 1e268L, 1e269L, 1e270,
|
||||||
1e271L,1e272L,1e273L,1e274L,1e275L,1e276L,1e277L,1e278L,1e279L,1e280L,1e281L,1e282L,1e283L,1e284L,1e285L,1e286L,1e287L,1e288L,1e289L,1e290,
|
1e271L, 1e272L, 1e273L, 1e274L, 1e275L, 1e276L, 1e277L, 1e278L, 1e279L, 1e280L, 1e281L, 1e282L, 1e283L, 1e284L, 1e285L, 1e286L, 1e287L, 1e288L, 1e289L, 1e290,
|
||||||
1e291L,1e292L,1e293L,1e294L,1e295L,1e296L,1e297L,1e298L,1e299L,1e300L,1e301L,1e302L,1e303L,1e304L,1e305L,1e306L,1e307L,1e308L
|
1e291L, 1e292L, 1e293L, 1e294L, 1e295L, 1e296L, 1e297L, 1e298L, 1e299L, 1e300L, 1e301L, 1e302L, 1e303L, 1e304L, 1e305L, 1e306L, 1e307L, 1e308L
|
||||||
    };

    if (unlikely(exponent < min_exponent)) /// Note: there are some values below MIN_EXPONENT that is greater than zero.
@@ -52,7 +52,7 @@ static T shift10Impl(T x, int exponent)
    else if (unlikely(exponent > max_exponent))
        x *= std::numeric_limits<T>::infinity(); /// Multiplying to keep the sign of infinity.
    else
-        x *= powers10[exponent - min_exponent];
+        x *= static_cast<T>(powers10[exponent - min_exponent]);

    return x;
}
@@ -68,12 +68,12 @@ float shift10(float x, int exponent)
    return shift10Impl(x, exponent);
}

-double shift10(UInt64 x, int exponent)
+long double shift10(UInt64 x, int exponent)
{
    return shift10Impl(static_cast<long double>(x), exponent);
}

-double shift10(Int64 x, int exponent)
+long double shift10(Int64 x, int exponent)
{
    return shift10Impl(static_cast<long double>(x), exponent);
}
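Illustration (not part of the commit) of what shift10 computes, assuming the declarations above are visible:

    #include <cassert>
    #include <cmath>

    int main()
    {
        assert(shift10(1.5, 2) == 150.0);                        // 1.5 * 10^2, exactly representable
        assert(std::fabs(shift10(1500.0, -3) - 1.5) < 1e-12);    // 1500 * 10^-3, up to rounding
    }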
@@ -12,5 +12,5 @@
double shift10(double x, int exponent);
float shift10(float x, int exponent);

-double shift10(UInt64 x, int exponent);
+long double shift10(UInt64 x, int exponent);
-double shift10(Int64 x, int exponent);
+long double shift10(Int64 x, int exponent);
14  base/base/simd.h  Normal file
@@ -0,0 +1,14 @@
#pragma once

#if defined(__aarch64__) && defined(__ARM_NEON)

#  include <arm_neon.h>
#  pragma clang diagnostic ignored "-Wreserved-identifier"

/// Returns a 64 bit mask of nibbles (4 bits for each byte).
inline uint64_t getNibbleMask(uint8x16_t res)
{
    return vget_lane_u64(vreinterpret_u64_u8(vshrn_n_u16(vreinterpretq_u16_u8(res), 4)), 0);
}

#endif
@@ -1,7 +1,7 @@
#include <base/sleep.h>

-#include <time.h>
+#include <ctime>
-#include <errno.h>
+#include <cerrno>

#if defined(OS_DARWIN)
#include <mach/mach.h>
Some files were not shown because too many files have changed in this diff.