Rename azure_blob_storage to azureBlobStorage
commit a8b579a856
parent a878312fdf
@@ -5,7 +5,7 @@ sidebar_label: azure_blob_storage
 keywords: [azure blob storage]
 ---

-# azure\_blob\_storage Table Function
+# azureBlobStorage Table Function

 Provides a table-like interface to select/insert files in [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs). This table function is similar to the [s3 function](../../sql-reference/table-functions/s3.md).
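As a quick orientation for the rename, here is a minimal usage sketch of the new spelling, following the signature introduced in this commit; the endpoint and credentials are the Azurite development-storage defaults used by the integration tests below, and the blob path is illustrative:

-- Read a CSV blob via the renamed table function (sketch, not part of the diff).
SELECT count(*)
FROM azureBlobStorage(
    'http://azurite1:10000/devstoreaccount1',   -- storage_account_url
    'cont',                                     -- container_name
    'example.csv',                              -- blobpath (illustrative)
    'devstoreaccount1',                         -- account_name
    'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==',  -- account_key
    'CSV', 'auto', 'key UInt64, data String'    -- format, compression, structure
);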
@@ -246,7 +246,7 @@ void registerTableFunctionAzureBlobStorage(TableFunctionFactory & factory)
     factory.registerFunction<TableFunctionAzureBlobStorage>(
         {.documentation
             = {.description=R"(The table function can be used to read the data stored on Azure Blob Storage.)",
-               .examples{{"azure_blob_storage", "SELECT * FROM azure_blob_storage(connection, container, blob_path, format, structure)", ""}}},
+               .examples{{"azureBlobStorage", "SELECT * FROM azureBlobStorage(connection_string|storage_account_url, container_name, blobpath, [account_name, account_key, format, compression, structure])", ""}}},
         .allow_readonly = false});
 }
@@ -18,7 +18,7 @@ class Context;
 class TableFunctionAzureBlobStorage : public ITableFunction
 {
 public:
-    static constexpr auto name = "azure_blob_storage";
+    static constexpr auto name = "azureBlobStorage";
     static constexpr auto signature = "- connection_string|storage_account_url, container_name, blobpath, [account_name, account_key, format, compression, structure]\n";

     static size_t getMaxNumberOfArguments() { return 8; }
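The leading connection_string|storage_account_url in the signature means the first argument accepts either of two forms, which the two write tests below exercise. A hedged sketch of both, with the Azurite values taken from those tests and <account_key> left as a placeholder:

-- Form 1: storage account URL, with account_name and account_key passed separately.
SELECT * FROM azureBlobStorage(
    'http://azurite1:10000/devstoreaccount1', 'cont', 'data.csv',
    'devstoreaccount1', '<account_key>', 'CSV', 'auto', 'key UInt64, data String');

-- Form 2: a single connection string; account_name and account_key are then omitted.
SELECT * FROM azureBlobStorage(
    'DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=<account_key>;BlobEndpoint=http://azurite1:10000/devstoreaccount1;',
    'cont', 'data.csv', 'CSV', 'auto', 'key UInt64, data String');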
@@ -466,7 +466,7 @@ def test_simple_write_account_string_table_function(cluster):
     node = cluster.instances["node"]
     azure_query(
         node,
-        "INSERT INTO TABLE FUNCTION azure_blob_storage('http://azurite1:10000/devstoreaccount1', 'cont', 'test_simple_write_tf.csv', 'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'CSV', 'auto', 'key UInt64, data String') VALUES (1, 'a')",
+        "INSERT INTO TABLE FUNCTION azureBlobStorage('http://azurite1:10000/devstoreaccount1', 'cont', 'test_simple_write_tf.csv', 'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'CSV', 'auto', 'key UInt64, data String') VALUES (1, 'a')",
     )
     print(get_azure_file_content("test_simple_write_tf.csv"))
     assert get_azure_file_content("test_simple_write_tf.csv") == '1,"a"\n'
@@ -476,7 +476,7 @@ def test_simple_write_connection_string_table_function(cluster):
     node = cluster.instances["node"]
     azure_query(
         node,
-        "INSERT INTO TABLE FUNCTION azure_blob_storage('DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azurite1:10000/devstoreaccount1;', 'cont', 'test_simple_write_connection_tf.csv', 'CSV', 'auto', 'key UInt64, data String') VALUES (1, 'a')",
+        "INSERT INTO TABLE FUNCTION azureBlobStorage('DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azurite1:10000/devstoreaccount1;', 'cont', 'test_simple_write_connection_tf.csv', 'CSV', 'auto', 'key UInt64, data String') VALUES (1, 'a')",
     )
     print(get_azure_file_content("test_simple_write_connection_tf.csv"))
     assert get_azure_file_content("test_simple_write_connection_tf.csv") == '1,"a"\n'
@@ -486,7 +486,7 @@ def test_simple_write_named_collection_1_table_function(cluster):
     node = cluster.instances["node"]
     azure_query(
         node,
-        "INSERT INTO TABLE FUNCTION azure_blob_storage(azure_conf1) VALUES (1, 'a')",
+        "INSERT INTO TABLE FUNCTION azureBlobStorage(azure_conf1) VALUES (1, 'a')",
     )
     print(get_azure_file_content("test_simple_write_named.csv"))
     assert get_azure_file_content("test_simple_write_named.csv") == '1,"a"\n'
@@ -507,7 +507,7 @@ def test_simple_write_named_collection_2_table_function(cluster):

     azure_query(
         node,
-        "INSERT INTO TABLE FUNCTION azure_blob_storage(azure_conf2, container='cont', blob_path='test_simple_write_named_2_tf.csv', format='CSV', structure='key UInt64, data String') VALUES (1, 'a')",
+        "INSERT INTO TABLE FUNCTION azureBlobStorage(azure_conf2, container='cont', blob_path='test_simple_write_named_2_tf.csv', format='CSV', structure='key UInt64, data String') VALUES (1, 'a')",
     )
     print(get_azure_file_content("test_simple_write_named_2_tf.csv"))
     assert get_azure_file_content("test_simple_write_named_2_tf.csv") == '1,"a"\n'
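The azure_conf1 and azure_conf2 arguments above are named collections defined in the test cluster's server configuration, which this diff does not show. As a sketch only, an equivalent collection could be declared in SQL like this; the key names are assumptions modeled on the function's parameters rather than copied from the test config:

-- Hypothetical declaration of a named collection like azure_conf2 (key names assumed).
CREATE NAMED COLLECTION azure_conf2 AS
    connection_string = 'DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=<account_key>;BlobEndpoint=http://azurite1:10000/devstoreaccount1;',
    container = 'cont';

Each query then supplies only per-call details, as the test above does with container, blob_path, format, and structure.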
@@ -529,9 +529,9 @@ def test_put_get_with_globs_tf(cluster):

             azure_query(
                 node,
-                f"INSERT INTO TABLE FUNCTION azure_blob_storage(azure_conf2, container='cont', blob_path='{path}', format='CSV', compression='auto', structure='{table_format}') VALUES {values}",
+                f"INSERT INTO TABLE FUNCTION azureBlobStorage(azure_conf2, container='cont', blob_path='{path}', format='CSV', compression='auto', structure='{table_format}') VALUES {values}",
             )
-    query = f"select sum(column1), sum(column2), sum(column3), min(_file), max(_path) from azure_blob_storage(azure_conf2, container='cont', blob_path='{unique_prefix}/*_{{a,b,c,d}}/?.csv', format='CSV', structure='{table_format}')"
+    query = f"select sum(column1), sum(column2), sum(column3), min(_file), max(_path) from azureBlobStorage(azure_conf2, container='cont', blob_path='{unique_prefix}/*_{{a,b,c,d}}/?.csv', format='CSV', structure='{table_format}')"
     assert azure_query(node, query).splitlines() == [
         "450\t450\t900\t0.csv\t{bucket}/{max_path}".format(
             bucket="cont", max_path=max_path
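The select in this hunk leans on two features worth noting: glob patterns in blob_path (* for any run of characters, ? for a single character, {a,b,c,d} for alternatives) and the virtual columns _file and _path, which report which blob each row came from. A hedged standalone sketch of the same idea, with an illustrative prefix and structure:

-- Aggregate across all blobs matching a glob, tracking each row's origin (sketch).
SELECT _file, _path, count(*)
FROM azureBlobStorage(azure_conf2, container='cont',
    blob_path='some_prefix/*_{a,b,c,d}/?.csv',
    format='CSV', structure='column1 UInt32, column2 UInt32, column3 UInt32')
GROUP BY _file, _path;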
@@ -543,10 +543,10 @@ def test_schema_inference_no_globs_tf(cluster):
     node = cluster.instances["node"]  # type: ClickHouseInstance
     table_format = "column1 UInt32, column2 String, column3 UInt32"

-    query = f"insert into table function azure_blob_storage(azure_conf2, container='cont', blob_path='test_schema_inference_no_globs_tf.csv', format='CSVWithNames', structure='{table_format}') SELECT number, toString(number), number * number FROM numbers(1000)"
+    query = f"insert into table function azureBlobStorage(azure_conf2, container='cont', blob_path='test_schema_inference_no_globs_tf.csv', format='CSVWithNames', structure='{table_format}') SELECT number, toString(number), number * number FROM numbers(1000)"
     azure_query(node, query)

-    query = "select sum(column1), sum(length(column2)), sum(column3), min(_file), max(_path) from azure_blob_storage(azure_conf2, container='cont', blob_path='test_schema_inference_no_globs_tf.csv')"
+    query = "select sum(column1), sum(length(column2)), sum(column3), min(_file), max(_path) from azureBlobStorage(azure_conf2, container='cont', blob_path='test_schema_inference_no_globs_tf.csv')"
     assert azure_query(node, query).splitlines() == [
         "499500\t2890\t332833500\ttest_schema_inference_no_globs_tf.csv\tcont/test_schema_inference_no_globs_tf.csv"
     ]
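Note that the second query drops format and structure entirely: ClickHouse infers both from the blob itself (here, a CSVWithNames file whose header row carries the column names). A hedged way to inspect what inference produces, reusing the test's named-collection arguments:

-- Show the inferred schema for the blob written above (sketch).
DESCRIBE TABLE azureBlobStorage(azure_conf2, container='cont',
    blob_path='test_schema_inference_no_globs_tf.csv');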
@@ -566,10 +566,10 @@ def test_schema_inference_from_globs_tf(cluster):
             max_path = max(path, max_path)
             values = f"({i},{j},{i + j})"

-            query = f"insert into table function azure_blob_storage(azure_conf2, container='cont', blob_path='{path}', format='CSVWithNames', structure='{table_format}') VALUES {values}"
+            query = f"insert into table function azureBlobStorage(azure_conf2, container='cont', blob_path='{path}', format='CSVWithNames', structure='{table_format}') VALUES {values}"
             azure_query(node, query)

-    query = f"select sum(column1), sum(column2), sum(column3), min(_file), max(_path) from azure_blob_storage(azure_conf2, container='cont', blob_path='{unique_prefix}/*_{{a,b,c,d}}/?.csv')"
+    query = f"select sum(column1), sum(column2), sum(column3), min(_file), max(_path) from azureBlobStorage(azure_conf2, container='cont', blob_path='{unique_prefix}/*_{{a,b,c,d}}/?.csv')"
     assert azure_query(node, query).splitlines() == [
         "450\t450\t900\t0.csv\t{bucket}/{max_path}".format(
             bucket="cont", max_path=max_path
@@ -586,7 +586,7 @@ def test_partition_by_tf(cluster):

     azure_query(
         node,
-        f"INSERT INTO TABLE FUNCTION azure_blob_storage('http://azurite1:10000/devstoreaccount1', 'cont', '{filename}', 'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'CSV', 'auto', '{table_format}') PARTITION BY {partition_by} VALUES {values}",
+        f"INSERT INTO TABLE FUNCTION azureBlobStorage('http://azurite1:10000/devstoreaccount1', 'cont', '{filename}', 'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'CSV', 'auto', '{table_format}') PARTITION BY {partition_by} VALUES {values}",
     )

     assert "1,2,3\n" == get_azure_file_content("test_tf_3.csv")
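The assertion reads test_tf_3.csv, which implies {filename} contains a partition-id placeholder so that each PARTITION BY group is written to its own blob. A hedged reconstruction of the pattern; the filename template, partition key, and values below are assumptions consistent with the visible assertion, not copied from the hunk:

-- Partitioned write: one blob per distinct value of the partition key (sketch).
INSERT INTO TABLE FUNCTION azureBlobStorage(
    'http://azurite1:10000/devstoreaccount1', 'cont', 'test_tf_{_partition_id}.csv',
    'devstoreaccount1', '<account_key>', 'CSV', 'auto',
    'column1 UInt32, column2 UInt32, column3 UInt32')
PARTITION BY column3 VALUES (1, 2, 3), (3, 2, 1), (78, 43, 45);

With column3 as the key, the row (1, 2, 3) lands in test_tf_3.csv, matching the assertion.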