mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-21 15:12:02 +00:00
Adding 'clean' script to clean up _instances folders.
Updating RBAC to run on the latest master. Fixing ldap SRS reference issue.
This commit is contained in:
parent
04a9dbeb2b
commit
4b87ae684c
tests/testflows/clean (new executable file, +9 lines)
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Clean up all the _instances folders where
+# ClickHouse services store their data.
+#
+# Note: needs root privilege because Docker mounts
+# these folders into containers.
+#
+find . -type d -name _instances | sudo xargs rm -rf
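For readers tracing the behavior, a Python sketch of what the one-liner above does (illustrative only; the committed script is the bash file itself):

    import os
    import shutil

    # Walk the tree and delete every directory named "_instances".
    # Like the bash script, this needs enough privilege to remove
    # files that Docker created as root inside those folders.
    for root, dirs, _files in os.walk("."):
        if "_instances" in dirs:
            shutil.rmtree(os.path.join(root, "_instances"), ignore_errors=True)
            dirs.remove("_instances")  # do not descend into the removed tree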
@@ -111,6 +111,11 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
         ("clickhouse1", "clickhouse2", "clickhouse3")
     }
 
+    if stress is not None:
+        self.context.stress = stress
+    if parallel is not None:
+        self.context.parallel = parallel
+
     with Cluster(local, clickhouse_binary_path, nodes=nodes,
             docker_compose_project_dir=os.path.join(current_dir(), "map_type_env")) as cluster:
         self.context.cluster = cluster
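The added lines use a keep-defaults-unless-given pattern: CLI values override the test context only when they were actually passed. A minimal standalone sketch of the same idea (the Context class is a stand-in for the TestFlows context, not the framework's type):

    class Context:
        stress = False    # default: no stress runs
        parallel = True   # default: parallel execution enabled

    def configure(context, stress=None, parallel=None):
        # None means "not specified"; keep the context default in that case
        if stress is not None:
            context.stress = stress
        if parallel is not None:
            context.parallel = parallel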
@@ -138,6 +138,12 @@ xfails = {
         [(Fail, issue_21083)],
     "privileges/: row policy/nested mat:":
         [(Fail, issue_21084)],
+    "privileges/show dictionaries/:/check privilege/check privilege=SHOW DICTIONARIES/show dict/SHOW DICTIONARIES with privilege":
+        [(Fail, "new bug")],
+    "privileges/show dictionaries/:/check privilege/check privilege=CREATE DICTIONARY/show dict/SHOW DICTIONARIES with privilege":
+        [(Fail, "new bug")],
+    "privileges/show dictionaries/:/check privilege/check privilege=DROP DICTIONARY/show dict/SHOW DICTIONARIES with privilege":
+        [(Fail, "new bug")],
 }
 
 xflags = {
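The xfails map ties test-name patterns to expected results, here marking the new SHOW DICTIONARIES failures as known. A hedged sketch of the lookup idea (the matcher below is a simplified stand-in using shell-style wildcards; TestFlows has its own pattern syntax, where ":" matches a name segment):

    from fnmatch import fnmatch

    xfails = {
        "privileges/show dictionaries/*/SHOW DICTIONARIES with privilege":
            [("Fail", "new bug")],
    }

    def expected_results(test_name):
        # return the recorded (result, reason) pairs for a known failure
        for pattern, results in xfails.items():
            if fnmatch(test_name, pattern):
                return results
        return []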
@@ -59,7 +59,7 @@ def privilege_check(grant_target_name, user_name, node=None):
 
             with Then("I attempt to attach a dictionary"):
                 node.query(f"ATTACH DICTIONARY {dict_name}", settings = [("user", user_name)],
-                    exitcode=231, message=f"DB::Exception: Dictionary `{dict_name}` doesn't exist.")
+                    exitcode=134, message=f"DB::Exception: Table `{dict_name}` doesn't exist.")
 
         finally:
             with Finally("I drop the dictionary"):
@@ -110,7 +110,7 @@ def privilege_check(grant_target_name, user_name, node=None):
 
             with Then("I attempt to attach a dictionary"):
                 node.query(f"ATTACH DICTIONARY {dict_name}", settings = [("user", user_name)],
-                    exitcode=231, message=f"DB::Exception: Dictionary `{dict_name}` doesn't exist.")
+                    exitcode=134, message=f"DB::Exception: Table `{dict_name}` doesn't exist.")
 
         finally:
             with Finally("I drop the dictionary"):
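Both hunks make the same one-line change: on the latest master a failed ATTACH DICTIONARY returns exit code 134 with a "Table `...` doesn't exist" message rather than 231 with "Dictionary `...` doesn't exist", so the negative-test expectations are updated. A sketch of the idiom isolated from the suite (node.query as used in the diff; the wrapper name is hypothetical):

    def expect_attach_fails(node, dict_name, user_name):
        # negative test: the query must fail with the expected
        # exit code and exception message on the current master
        node.query(f"ATTACH DICTIONARY {dict_name}",
                   settings=[("user", user_name)],
                   exitcode=134,
                   message=f"DB::Exception: Table `{dict_name}` doesn't exist.")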
@@ -16,13 +16,16 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
     top().terminating = False
     args = {"local": local, "clickhouse_binary_path": clickhouse_binary_path, "stress": stress, "parallel": parallel}
 
+    self.context.stress = stress
+    self.context.parallel = parallel
+
     tasks = []
     with Pool(7) as pool:
         try:
-            # run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
-            # run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
-            # run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
-            # run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
+            run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
+            run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
+            run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
+            run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
             run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
             run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
             # run_scenario(pool, tasks, Feature(test=load("kerberos.regression", "regression")), args)
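run_scenario itself is not part of this diff; a plausible shape for such a helper, hypothetical and for illustration only (assuming a concurrent.futures-style pool with submit), just to show why pool and tasks are threaded through:

    def run_scenario(pool, tasks, scenario, args):
        # hypothetical sketch: run the feature in a worker when parallel
        # execution is requested, otherwise run it inline
        if args.get("parallel"):
            tasks.append(pool.submit(scenario, **args))
        else:
            scenario(**args)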
@@ -86,6 +86,11 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
         ("clickhouse1", "clickhouse2", "clickhouse3")
     }
 
+    if stress is not None:
+        self.context.stress = stress
+    if parallel is not None:
+        self.context.parallel = parallel
+
     with Cluster(local, clickhouse_binary_path, nodes=nodes,
             docker_compose_project_dir=os.path.join(current_dir(), "window_functions_env")) as cluster:
         self.context.cluster = cluster
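The map_type and window_functions hunks are the same pattern with different docker-compose project directories: each suite keeps its compose files in a "<suite>_env" folder next to its regression module. A hedged sketch of just the path construction (the helper name is illustrative, not from the repo):

    import os

    def compose_project_dir(base_dir, suite):
        # e.g. ("tests/testflows", "window_functions")
        #   -> "tests/testflows/window_functions_env"
        return os.path.join(base_dir, f"{suite}_env")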