Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-24 08:32:02 +00:00)

Commit f27f519aa2 (parent d78e551de9): Fix build and add example
@@ -47,7 +47,7 @@ option (ENABLE_CLICKHOUSE_LIBRARY_BRIDGE "HTTP-server working like a proxy to Li
# https://presentations.clickhouse.tech/matemarketing_2020/
option (ENABLE_CLICKHOUSE_GIT_IMPORT "A tool to analyze Git repositories" ${ENABLE_CLICKHOUSE_ALL})

option (ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER "A tool to put table data files to a web server" ${ENABLE_CLICKHOUSE_ALL})
option (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER "A tool to put table data files to a web server" ${ENABLE_CLICKHOUSE_ALL})

option (ENABLE_CLICKHOUSE_KEEPER "ClickHouse alternative to ZooKeeper" ${ENABLE_CLICKHOUSE_ALL})
@@ -227,7 +227,7 @@ add_subdirectory (obfuscator)
add_subdirectory (install)
add_subdirectory (git-import)
add_subdirectory (bash-completion)
add_subdirectory (web-server-exporter)
add_subdirectory (static-files-disk-uploader)

if (ENABLE_CLICKHOUSE_KEEPER)
    add_subdirectory (keeper)
@@ -260,7 +260,7 @@ if (CLICKHOUSE_ONE_SHARED)
        ${CLICKHOUSE_ODBC_BRIDGE_SOURCES}
        ${CLICKHOUSE_KEEPER_SOURCES}
        ${CLICKHOUSE_KEEPER_CONVERTER_SOURCES}
        ${CLICKHOUSE_WEB_SERVER_EXPORTER_SOURCES})
        ${CLICKHOUSE_STATIC_FILES_DISK_UPLOADER_SOURCES})

    target_link_libraries(clickhouse-lib
        ${CLICKHOUSE_SERVER_LINK}
@@ -276,7 +276,7 @@ if (CLICKHOUSE_ONE_SHARED)
        ${CLICKHOUSE_ODBC_BRIDGE_LINK}
        ${CLICKHOUSE_KEEPER_LINK}
        ${CLICKHOUSE_KEEPER_CONVERTER_LINK}
        ${CLICKHOUSE_WEB_SERVER_EXPORTER_LINK})
        ${CLICKHOUSE_STATIC_FILES_DISK_UPLOADER_LINK})

    target_include_directories(clickhouse-lib
        ${CLICKHOUSE_SERVER_INCLUDE}
@@ -309,7 +309,7 @@ if (CLICKHOUSE_SPLIT_BINARY)
        clickhouse-obfuscator
        clickhouse-git-import
        clickhouse-copier
        clickhouse-web-server-exporter
        clickhouse-static-files-disk-uploader
    )

    if (ENABLE_CLICKHOUSE_ODBC_BRIDGE)
@@ -375,8 +375,8 @@ else ()
    if (ENABLE_CLICKHOUSE_GIT_IMPORT)
        clickhouse_target_link_split_lib(clickhouse git-import)
    endif ()
    if (ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER)
        clickhouse_target_link_split_lib(clickhouse web-server-exporter)
    if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
        clickhouse_target_link_split_lib(clickhouse static-files-disk-uploader)
    endif ()
    if (ENABLE_CLICKHOUSE_KEEPER)
        clickhouse_target_link_split_lib(clickhouse keeper)
@@ -439,10 +439,10 @@ else ()
        install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-git-import" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
        list(APPEND CLICKHOUSE_BUNDLE clickhouse-git-import)
    endif ()
    if (ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER)
        add_custom_target (clickhouse-web-server-exporter ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-web-server-exporter DEPENDS clickhouse)
        install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-web-server-exporter" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
        list(APPEND CLICKHOUSE_BUNDLE clickhouse-web-server-exporter)
    if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
        add_custom_target (clickhouse-static-files-disk-uploader ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-static-files-disk-uploader DEPENDS clickhouse)
        install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-static-files-disk-uploader" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
        list(APPEND CLICKHOUSE_BUNDLE clickhouse-static-files-disk-uploader)
    endif ()
    if (ENABLE_CLICKHOUSE_KEEPER)
        add_custom_target (clickhouse-keeper ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-keeper DEPENDS clickhouse)
@@ -18,4 +18,4 @@
#cmakedefine01 ENABLE_CLICKHOUSE_LIBRARY_BRIDGE
#cmakedefine01 ENABLE_CLICKHOUSE_KEEPER
#cmakedefine01 ENABLE_CLICKHOUSE_KEEPER_CONVERTER
#cmakedefine01 ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER
#cmakedefine01 ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER
@@ -62,8 +62,8 @@ int mainEntryClickHouseKeeper(int argc, char ** argv);
#if ENABLE_CLICKHOUSE_KEEPER
int mainEntryClickHouseKeeperConverter(int argc, char ** argv);
#endif
#if ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER
int mainEntryClickHouseWebServerExporter(int argc, char ** argv);
#if ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER
int mainEntryClickHouseStaticFilesDiskUploader(int argc, char ** argv);
#endif
#if ENABLE_CLICKHOUSE_INSTALL
int mainEntryClickHouseInstall(int argc, char ** argv);
@@ -135,8 +135,8 @@ std::pair<const char *, MainFunc> clickhouse_applications[] =
    {"status", mainEntryClickHouseStatus},
    {"restart", mainEntryClickHouseRestart},
#endif
#if ENABLE_CLICKHOUSE_WEB_SERVER_EXPORTER
    {"web-server-exporter", mainEntryClickHouseWebServerExporter},
#if ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER
    {"static-files-disk-uploader", mainEntryClickHouseStaticFilesDiskUploader},
#endif
    {"hash-binary", mainEntryClickHouseHashBinary},
};
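The clickhouse_applications table above, together with the create_symlink targets in the CMake hunks, is what makes the single clickhouse binary behave as a multicall tool: the entry point is picked from the name the binary was invoked under or from the first argument. The following is a minimal Python sketch of that dispatch pattern, not the actual main.cpp logic; the entry functions are placeholders standing in for mainEntryClickHouseStaticFilesDiskUploader and friends.

import os
import sys

# Placeholder entry points standing in for the real mainEntryClickHouse* functions.
def entry_static_files_disk_uploader(args):
    print("static-files-disk-uploader invoked with:", args)
    return 0

def entry_git_import(args):
    print("git-import invoked with:", args)
    return 0

APPLICATIONS = {
    "static-files-disk-uploader": entry_static_files_disk_uploader,
    "git-import": entry_git_import,
}

def main(argv):
    # Try the invoked name first (e.g. the clickhouse-static-files-disk-uploader symlink),
    # then fall back to the first argument (clickhouse static-files-disk-uploader ...).
    name = os.path.basename(argv[0])
    if name.startswith("clickhouse-"):
        name = name[len("clickhouse-"):]
    if name in APPLICATIONS:
        return APPLICATIONS[name](argv[1:])
    if len(argv) > 1 and argv[1] in APPLICATIONS:
        return APPLICATIONS[argv[1]](argv[2:])
    print("unknown tool name", file=sys.stderr)
    return 1

if __name__ == "__main__":
    sys.exit(main(sys.argv))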
programs/static-files-disk-uploader/CMakeLists.txt (new file, 10 lines added)
@@ -0,0 +1,10 @@
set (CLICKHOUSE_STATIC_FILES_DISK_UPLOADER_SOURCES clickhouse-static-files-disk-uploader.cpp)

set (CLICKHOUSE_STATIC_FILES_DISK_UPLOADER_LINK
    PRIVATE
        boost::program_options
        common
        dbms
)

clickhouse_program_add(static-files-disk-uploader)
@@ -28,14 +28,14 @@ namespace ErrorCodes
}

/*
 * A tool to collect files on local fs as is (into current directory or into path from --output-dir option).
 * A tool to collect table data files on local fs as is (into current directory or into path from --output-dir option).
 * If test-mode option is added, files will be put by given url via PUT request.
 */

void processTableFiles(const fs::path & path, const String & files_prefix, String uuid, WriteBuffer & metadata_buf, std::function<std::shared_ptr<WriteBuffer>(const String &)> create_dst_buf)
void processTableFiles(const fs::path & path, const String & files_prefix, String uuid,
                       WriteBuffer & metadata_buf, std::function<std::shared_ptr<WriteBuffer>(const String &)> create_dst_buf)
{
    fs::directory_iterator dir_end;

    auto process_file = [&](const String & file_name, const String & file_path)
    {
        auto remote_file_name = files_prefix + "-" + uuid + "-" + file_name;
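For reference, the integration test further down invokes this tool as /usr/bin/clickhouse static-files-disk-uploader --test-mode --files-prefix data --url http://nginx:80/test1 --metadata-path <path>. A hedged Python sketch of the same call pattern, with a hypothetical metadata path and endpoint standing in for the real test environment:

import subprocess

# Hypothetical values for illustration; the integration test derives metadata_path
# from the server's data layout and uploads to an nginx container.
metadata_path = "/path/to/table/metadata"   # hypothetical
url = "http://localhost:8080/test1"         # hypothetical endpoint accepting PUT

subprocess.run(
    [
        "clickhouse", "static-files-disk-uploader",
        "--test-mode",                 # PUT files to --url instead of writing them locally
        "--files-prefix", "data",      # prefix used when naming the uploaded files
        "--url", url,
        "--metadata-path", metadata_path,
    ],
    check=True,
)

Without --test-mode, the files are written to the current directory or to the path given by --output-dir, as the comment above states.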
@@ -58,10 +58,8 @@ void processTableFiles(const fs::path & path, const String & files_prefix, Strin
        {
            fs::directory_iterator files_end;
            for (fs::directory_iterator file_it(dir_it->path()); file_it != files_end; ++file_it)
            {
                process_file(dir_it->path().filename().string() + "-" + file_it->path().filename().string(), file_it->path());
            }
        }
        else
        {
            process_file(dir_it->path().filename(), dir_it->path());
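The loop above flattens a table directory into a single remote namespace: files inside a subdirectory (for example a data part) get the directory name prepended with a dash, and every name carries the files_prefix + "-" + uuid + "-" prefix. A small Python sketch of the same naming scheme, illustration only, with made-up inputs:

from pathlib import Path

def remote_names(table_path, files_prefix, uuid):
    """Mirror the naming used by processTableFiles: <prefix>-<uuid>-[<dir>-]<file>."""
    names = []
    for entry in Path(table_path).iterdir():
        if entry.is_dir():
            # Files inside a subdirectory become "<dirname>-<filename>".
            for f in entry.iterdir():
                names.append(f"{files_prefix}-{uuid}-{entry.name}-{f.name}")
        else:
            names.append(f"{files_prefix}-{uuid}-{entry.name}")
    return names

# Example (made-up layout): a part directory all_1_1_0/ containing columns.txt
# would map to "data-<uuid>-all_1_1_0-columns.txt".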
@@ -70,8 +68,7 @@ void processTableFiles(const fs::path & path, const String & files_prefix, Strin
    }
}

int mainEntryClickHouseWebServerExporter(int argc, char ** argv)
int mainEntryClickHouseStaticFilesDiskUploader(int argc, char ** argv)
try
{
    using namespace DB;
@@ -1,9 +0,0 @@
set (CLICKHOUSE_WEB_SERVER_EXPORTER_SOURCES clickhouse-web-server-exporter.cpp)

set (CLICKHOUSE_WEB_SERVER_EXPORTER_LINK
    PRIVATE
        boost::program_options
        dbms
)

clickhouse_program_add(web-server-exporter)
@@ -27,6 +27,30 @@ struct DiskWebServerSettings


/*
 * Quick ready test - you can try this disk, by using these queries (disk has two tables) and this endpoint:
 *
 * ATTACH TABLE contributors UUID 'a563f7d8-fb00-4d50-a563-f7d8fb007d50' (good_person_name String) engine=MergeTree() order by good_person_name settings storage_policy='web';
 * ATTACH TABLE test UUID '11c7a2f9-a949-4c88-91c7-a2f9a949ec88' (a Int32) engine=MergeTree() order by a settings storage_policy='web';
 *
 * <storage_configuration>
 *     <disks>
 *         <web>
 *             <type>web</type>
 *             <endpoint>https://clickhouse-datasets.s3.yandex.net/kssenii-static-files-disk-test/kssenii-disk-tests/test1/</endpoint>
 *             <files_prefix>data</files_prefix>
 *         </web>
 *     </disks>
 *     <policies>
 *         <web>
 *             <volumes>
 *                 <main>
 *                     <disk>web</disk>
 *                 </main>
 *             </volumes>
 *         </web>
 *     </policies>
 * </storage_configuration>
 *
 * If url is not reachable on disk load when server is starting up tables, then all errors are caught.
 * If in this case there were errors, tables can be reloaded (become visible) via detach table table_name -> attach table table_name.
 * If metadata was successfully loaded at server startup, then tables are available straight away.
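The last three comment lines describe the recovery path: if the endpoint was unreachable while the server started, the web-disk tables stay invisible until they are detached and re-attached. A hedged sketch of that step over ClickHouse's standard HTTP interface, assuming a server on localhost:8123 and the table name test from the example above:

import requests  # third-party package, assumed available

CLICKHOUSE_URL = "http://localhost:8123/"  # default HTTP interface; adjust for your setup

def run_query(query):
    # The HTTP interface accepts the query text as the POST body.
    response = requests.post(CLICKHOUSE_URL, data=query)
    response.raise_for_status()
    return response.text

if __name__ == "__main__":
    # Reload a web-disk table whose metadata could not be fetched at startup.
    run_query("DETACH TABLE test")
    run_query("ATTACH TABLE test")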
@@ -26,7 +26,7 @@ def cluster():
        print(f'Metadata: {metadata_path}')

        node1.exec_in_container(['bash', '-c',
            '/usr/bin/clickhouse web-server-exporter --test-mode --files-prefix data --url http://nginx:80/test1 --metadata-path {}'.format(metadata_path)], user='root')
            '/usr/bin/clickhouse static-files-disk-uploader --test-mode --files-prefix data --url http://nginx:80/test1 --metadata-path {}'.format(metadata_path)], user='root')
        parts = metadata_path.split('/')
        uuids.append(parts[3])
        print(f'UUID: {parts[3]}')