trivial

commit 34fd15ca40
parent ecf928737b
@@ -23,7 +23,7 @@ def started_cluster():
         cluster.shutdown()
 
 
 def test_read_table(started_cluster):
-    hdfs_api = started_cluster.make_hdfs_api()
+    hdfs_api = started_cluster.make_hdfs_api(kerberized=True)
 
     data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
     hdfs_api.write_data("/simple_table_function", data)
@@ -36,7 +36,7 @@ def test_read_table(started_cluster):
 
 
 def test_read_write_storage(started_cluster):
-    hdfs_api = started_cluster.make_hdfs_api()
+    hdfs_api = started_cluster.make_hdfs_api(kerberized=True)
 
     node1.query("create table SimpleHDFSStorage2 (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage1', 'TSV')")
     node1.query("insert into SimpleHDFSStorage2 values (1, 'Mark', 72.53)")
@@ -49,7 +49,7 @@ def test_read_write_storage(started_cluster):
 
 
 def test_write_storage_not_expired(started_cluster):
-    hdfs_api = started_cluster.make_hdfs_api()
+    hdfs_api = started_cluster.make_hdfs_api(kerberized=True)
 
     node1.query("create table SimpleHDFSStorageNotExpired (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage_not_expired', 'TSV')")
 
@@ -64,7 +64,7 @@ def test_write_storage_not_expired(started_cluster):
 
 
 def test_two_users(started_cluster):
-    hdfs_api = started_cluster.make_hdfs_api()
+    hdfs_api = started_cluster.make_hdfs_api(kerberized=True)
 
     node1.query("create table HDFSStorOne (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/storage_user_one', 'TSV')")
     node1.query("insert into HDFSStorOne values (1, 'Real', 86.00)")
@@ -77,7 +77,7 @@ def test_two_users(started_cluster):
     select_read_2 = node1.query("select * from hdfs('hdfs://suser@kerberizedhdfs1:9010/storage_user_one', 'TSV', 'id UInt64, text String, number Float64')")
 
 def test_read_table_expired(started_cluster):
-    hdfs_api = started_cluster.make_hdfs_api()
+    hdfs_api = started_cluster.make_hdfs_api(kerberized=True)
 
     data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
     hdfs_api.write_data("/simple_table_function_relogin", data)
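
Every hunk above makes the same one-line change: the tests now ask the cluster fixture for a Kerberos-aware HDFS client by passing kerberized=True to make_hdfs_api(). The sketch below only illustrates that pattern; it is not the real helpers/cluster.py code, and the HDFSApi class, the principal, the keytab path, and the non-kerberized port are hypothetical placeholders (only the kerberizedhdfs1:9010 endpoint comes from the diff itself).

# Illustrative sketch only -- not the actual ClickHouse test helper.
class HDFSApi:
    """Minimal stand-in for a WebHDFS client wrapper used by integration tests."""

    def __init__(self, host, port, kerberized=False, principal=None, keytab=None):
        self.host = host
        self.port = port
        self.kerberized = kerberized   # when True, requests would use SPNEGO/Kerberos auth
        self.principal = principal     # Kerberos principal to authenticate as (placeholder)
        self.keytab = keytab           # keytab holding that principal's key (placeholder)

    def write_data(self, path, data):
        # A real helper would issue WebHDFS requests here; omitted in this sketch.
        raise NotImplementedError


def make_hdfs_api(kerberized=False):
    """Return an HDFS client; kerberized=True targets the kerberized namenode."""
    if kerberized:
        return HDFSApi("kerberizedhdfs1", 9010, kerberized=True,
                       principal="specuser@EXAMPLE.REALM",      # placeholder principal
                       keytab="/tmp/keytab/clickhouse.keytab")  # placeholder keytab path
    return HDFSApi("hdfs1", 50070)  # plain HDFS endpoint; host and port are placeholders


# Usage mirroring the updated tests:
hdfs_api = make_hdfs_api(kerberized=True)
print(hdfs_api.host, hdfs_api.kerberized)  # kerberizedhdfs1 True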