Replace calls to run_scenario() and pool.apply_async() because TestFlows has been upgraded to 1.7.20.

See also https://github.com/ClickHouse/ClickHouse/pull/27040
This commit is contained in:
Vitaly Baranov 2022-01-11 22:11:38 +07:00
parent 5d0e8eb500
commit 27625bfd47
27 changed files with 230 additions and 256 deletions

View File

@ -40,7 +40,6 @@ xfails = {
def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"""ClickHouse integration with LDAP regression module.
"""
top().terminating = False
nodes = {
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}

View File

@ -2,7 +2,7 @@
import random
import time
from helpers.common import Pool, join
from helpers.common import Pool
from testflows.core import *
from testflows.asserts import error
from ldap.authentication.tests.common import *
@ -107,13 +107,14 @@ def parallel_login(self, server, user_count=10, timeout=300, rbac=False):
with Pool(4) as pool:
try:
for i in range(5):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout=timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(
RQ_SRS_007_LDAP_Authentication_Invalid("1.0"),

View File

@ -40,7 +40,6 @@ xfails = {
def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"""ClickHouse LDAP external user directory regression module.
"""
top().terminating = False
nodes = {
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}

View File

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import random
from helpers.common import Pool, join
from helpers.common import Pool
from testflows.core import *
from testflows.asserts import error
@ -102,13 +102,14 @@ def parallel_login(self, server, user_count=10, timeout=300):
* with valid username and invalid password
"""):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(
@ -135,12 +136,13 @@ def parallel_login_with_the_same_user(self, server, timeout=300):
* with valid username and invalid password
"""):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Tags("custom config")
@ -192,12 +194,13 @@ def parallel_login_with_the_same_user_multiple_servers(self, server, timeout=300
* with valid username and invalid password
"""):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Tags("custom config")
@ -245,10 +248,11 @@ def parallel_login_with_multiple_servers(self, server, user_count=10, timeout=30
for i in range(10):
for users in user_groups.values():
for check in checks:
tasks.append(pool.apply_async(check, (users, i, 50,)))
tasks.append(pool.submit(check, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Tags("custom config")
@ -299,10 +303,11 @@ def parallel_login_with_rbac_and_multiple_servers(self, server, user_count=10, t
for i in range(10):
for users in user_groups.values():
for check in checks:
tasks.append(pool.apply_async(check, (users, i, 50,)))
tasks.append(pool.submit(check, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(
@ -323,12 +328,13 @@ def parallel_login_with_rbac_users(self, server, user_count=10, timeout=300):
try:
with When("I login in parallel"):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(

View File

@ -1,6 +1,6 @@
import random
from helpers.common import Pool, join
from helpers.common import Pool
from testflows.core import *
from testflows.asserts import error
@ -267,12 +267,13 @@ def parallel_login(self, server=None, user_count=10, timeout=300):
with When("I restart the server during parallel login of users in each group"):
for users in user_groups.values():
for check in checks:
tasks.append(pool.apply_async(check, (users, 0, 25, True)))
tasks.append(pool.submit(check, (users, 0, 25, True)))
tasks.append(pool.apply_async(restart))
tasks.append(pool.submit(restart))
finally:
with Then("logins during restart should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
tasks = []
with Pool(4) as pool:
@ -280,10 +281,11 @@ def parallel_login(self, server=None, user_count=10, timeout=300):
with When("I perform parallel login of users in each group after restart"):
for users in user_groups.values():
for check in checks:
tasks.append(pool.apply_async(check, (users, 0, 10, False)))
tasks.append(pool.submit(check, (users, 0, 10, False)))
finally:
with Then("logins after restart should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestOutline(Feature)
@Name("restart")

View File

@ -4,7 +4,7 @@ from testflows.core import *
append_path(sys.path, "..")
from helpers.common import Pool, join, run_scenario
from helpers.common import Pool, join
from helpers.argparser import argparser
@TestModule
@ -13,22 +13,18 @@ from helpers.argparser import argparser
def regression(self, local, clickhouse_binary_path, parallel=None, stress=None):
"""ClickHouse LDAP integration regression module.
"""
top().terminating = False
args = {"local": local, "clickhouse_binary_path": clickhouse_binary_path}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
tasks = []
with Pool(3) as pool:
try:
run_scenario(pool, tasks, Feature(test=load("ldap.authentication.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("ldap.external_user_directory.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("ldap.role_mapping.regression", "regression")), args)
Feature(test=load("ldap.authentication.regression", "regression"), parallel=True, executor=pool)(**args)
Feature(test=load("ldap.external_user_directory.regression", "regression"), parallel=True, executor=pool)(**args)
Feature(test=load("ldap.role_mapping.regression", "regression"), parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()
if main():
regression()

View File

@ -30,7 +30,6 @@ xfails = {
def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"""ClickHouse LDAP role mapping regression module.
"""
top().terminating = False
nodes = {
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}

View File

@ -2,7 +2,7 @@
from testflows.core import *
from testflows.asserts import error
from helpers.common import Pool, join
from helpers.common import Pool
from ldap.role_mapping.requirements import *
from ldap.role_mapping.tests.common import *
@ -1053,12 +1053,13 @@ def group_removed_and_added_in_parallel(self, ldap_server, ldap_user, count=20,
try:
with When("user try to login while LDAP groups are added and removed in parallel"):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(remove_ldap_groups_in_parallel, (groups, i, 10,)))
tasks.append(pool.apply_async(add_ldap_groups_in_parallel,(ldap_user, role_names, i, 10,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(remove_ldap_groups_in_parallel, (groups, i, 10,)))
tasks.append(pool.submit(add_ldap_groups_in_parallel,(ldap_user, role_names, i, 10,)))
finally:
with Finally("it should work", flags=TE):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
finally:
with Finally("I clean up all LDAP groups"):
for group in groups:
@ -1105,12 +1106,13 @@ def user_removed_and_added_in_ldap_groups_in_parallel(self, ldap_server, ldap_us
try:
with When("user try to login while user is added and removed from LDAP groups in parallel"):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(remove_user_from_ldap_groups_in_parallel, (ldap_user, groups, i, 1,)))
tasks.append(pool.apply_async(add_user_to_ldap_groups_in_parallel, (ldap_user, groups, i, 1,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(remove_user_from_ldap_groups_in_parallel, (ldap_user, groups, i, 1,)))
tasks.append(pool.submit(add_user_to_ldap_groups_in_parallel, (ldap_user, groups, i, 1,)))
finally:
with Finally("it should work", flags=TE):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(
@ -1154,12 +1156,13 @@ def roles_removed_and_added_in_parallel(self, ldap_server, ldap_user, count=20,
try:
with When("user try to login while mapped roles are added and removed in parallel"):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(remove_roles_in_parallel, (role_names, i, 10,)))
tasks.append(pool.apply_async(add_roles_in_parallel, (role_names, i, 10,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(remove_roles_in_parallel, (role_names, i, 10,)))
tasks.append(pool.submit(add_roles_in_parallel, (role_names, i, 10,)))
finally:
with Finally("it should work", flags=TE):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
with And("I clean up all the roles"):
for role_name in role_names:
@ -1213,12 +1216,13 @@ def parallel_login(self, ldap_server, ldap_user, user_count=10, timeout=200, rol
* with valid username and invalid password
"""):
for i in range(10):
tasks.append(pool.apply_async(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.apply_async(login_with_invalid_username_and_valid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_valid_username_and_invalid_password, (users, i, 50,)))
tasks.append(pool.submit(login_with_invalid_username_and_valid_password, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestScenario
@Requirements(
@ -1395,10 +1399,11 @@ def parallel_login_with_multiple_servers(self, ldap_server, ldap_user, user_coun
for i in range(10):
for users in user_groups.values():
for check in checks:
tasks.append(pool.apply_async(check, (users, i, 50,)))
tasks.append(pool.submit(check, (users, i, 50,)))
finally:
with Then("it should work"):
join(tasks, timeout)
for task in tasks:
task.result(timeout=timeout)
@TestFeature
@Name("mapping")

View File

@ -8,7 +8,7 @@ from testflows.core.name import basename, parentname
from testflows._core.testtype import TestSubType
from testflows.core import *
from helpers.common import Pool, join, run_scenario, instrument_clickhouse_server_log
from helpers.common import Pool, join, instrument_clickhouse_server_log
from rbac.helper.tables import table_types
def permutations(table_count=1):

View File

@ -178,7 +178,6 @@ xflags = {
def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"""RBAC regression.
"""
top().terminating = False
nodes = {
"clickhouse":
("clickhouse1", "clickhouse2", "clickhouse3")

View File

@ -699,14 +699,13 @@ def user_with_privileges_on_cluster(self, permutation, table_type, node=None):
@TestSuite
def scenario_parallelization(self, table_type, permutation):
args = {"table_type": table_type, "permutation": permutation}
with Pool(7) as pool:
tasks = []
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log),
{"table_type": table_type, "permutation": permutation})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()
@TestFeature
@Requirements(
@ -719,13 +718,11 @@ def scenario_parallelization(self, table_type, permutation):
(key,) for key in table_types.keys()
])
@Name("alter column")
def feature(self, node="clickhouse1", stress=None, parallel=None):
def feature(self, node="clickhouse1", stress=None):
"""Runs test suites above which check correctness over scenarios and permutations.
"""
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
@ -737,12 +734,10 @@ def feature(self, node="clickhouse1", stress=None, parallel=None):
with Example(str(example)):
with Pool(10) as pool:
tasks = []
try:
for permutation in permutations(table_type):
privileges = alter_column_privileges(permutation)
run_scenario(pool, tasks, Suite(test=scenario_parallelization, name=privileges),
{"table_type": table_type, "permutation": permutation})
args = {"table_type": table_type, "permutation": permutation}
Suite(test=scenario_parallelization, name=privileges, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -1,7 +1,6 @@
import json
from testflows.core import *
from testflows.core import threading
from testflows.asserts import error
from rbac.requirements import *
@ -282,11 +281,9 @@ def user_with_privileges_on_cluster(self, table_type, node=None):
(key,) for key in table_types.keys()
])
@Name("alter constraint")
def feature(self, node="clickhouse1", parallel=None, stress=None):
def feature(self, node="clickhouse1", stress=None):
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
@ -296,11 +293,12 @@ def feature(self, node="clickhouse1", parallel=None, stress=None):
if table_type != "MergeTree" and not self.context.stress:
continue
args = {"table_type" : table_type}
with Example(str(example)):
with Pool(5) as pool:
tasks = []
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -460,11 +460,12 @@ def feature(self, node="clickhouse1", stress=None, parallel=None):
if table_type != "MergeTree" and not self.context.stress:
continue
args = {"table_type" : table_type}
with Example(str(example)):
with Pool(5) as pool:
tasks = []
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -171,13 +171,14 @@ def user_with_privileges_on_cluster(self, privilege, table_type, node=None):
def scenario_parallelization(self, table_type, privilege):
"""Runs all scenarios in parallel for a given privilege.
"""
args = {"table_type": table_type, "privilege": privilege}
with Pool(4) as pool:
tasks = []
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario), {"table_type": table_type, "privilege": privilege})
Scenario(test=scenario, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()
@TestFeature
@Requirements(
@ -190,13 +191,11 @@ def scenario_parallelization(self, table_type, privilege):
(key,) for key in table_types.keys()
])
@Name("alter settings")
def feature(self, node="clickhouse1", stress=None, parallel=None):
def feature(self, node="clickhouse1", stress=None):
"""Runs test suites above which check correctness over scenarios and permutations
"""
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
@ -208,11 +207,9 @@ def feature(self, node="clickhouse1", stress=None, parallel=None):
with Example(str(example)):
with Pool(4) as pool:
tasks = []
try:
for alias in aliases:
run_scenario(pool, tasks, Suite(test=scenario_parallelization, name=alias,
setup=instrument_clickhouse_server_log),
{"table_type": table_type, "privilege": alias})
args = {"table_type": table_type, "privilege": alias}
Suite(test=scenario_parallelization, name=alias, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -1,7 +1,6 @@
import json
from testflows.core import *
from testflows.core import threading
from testflows.asserts import error
from rbac.requirements import *
@ -258,11 +257,9 @@ def user_with_privileges_on_cluster(self, table_type, node=None):
(key,) for key in table_types.keys()
])
@Name("alter ttl")
def feature(self, node="clickhouse1", stress=None, parallel=None):
def feature(self, node="clickhouse1", stress=None):
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
@ -272,11 +269,12 @@ def feature(self, node="clickhouse1", stress=None, parallel=None):
if table_type != "MergeTree" and not self.context.stress:
continue
args = {"table_type" : table_type}
with Example(str(example)):
with Pool(5) as pool:
tasks = []
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -829,20 +829,17 @@ def create_as_merge(self, node=None):
RQ_SRS_006_RBAC_Privileges_CreateTable("1.0"),
)
@Name("create table")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
def feature(self, stress=None, node="clickhouse1"):
"""Check the RBAC functionality of CREATE TABLE.
"""
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
with Pool(10) as pool:
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, scenario)
Scenario(test=scenario, parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -651,37 +651,35 @@ def dictGetType_check(self, privilege, on, grant_target_name, user_name, type, n
RQ_SRS_006_RBAC_Privileges_None("1.0")
)
@Name("dictGet")
def feature(self, node="clickhouse1", stress=None, parallel=None):
def feature(self, node="clickhouse1", stress=None):
"""Check the RBAC functionality of dictGet.
"""
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
with Pool(20) as pool:
tasks = []
try:
run_scenario(pool, tasks, Suite(test=dictGet_granted_directly, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictGet_granted_via_role, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictGetOrDefault_granted_directly, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictGetOrDefault_granted_via_role, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictHas_granted_directly, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictHas_granted_via_role, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictGetHierarchy_granted_directly, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictGetHierarchy_granted_via_role, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictIsIn_granted_directly, setup=instrument_clickhouse_server_log))
run_scenario(pool, tasks, Suite(test=dictIsIn_granted_via_role, setup=instrument_clickhouse_server_log))
Suite(test=dictGet_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictGet_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictGetOrDefault_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictGetOrDefault_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictHas_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictHas_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictGetHierarchy_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictGetHierarchy_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictIsIn_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
Suite(test=dictIsIn_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)
for example in dictGetType_granted_directly.examples:
type, = example
args = {"type" : type}
with Example(example):
run_scenario(pool, tasks, Suite(test=dictGetType_granted_directly, setup=instrument_clickhouse_server_log),{"type" : type})
run_scenario(pool, tasks, Suite(test=dictGetType_granted_via_role, setup=instrument_clickhouse_server_log),{"type" : type})
Suite(test=dictGetType_granted_directly, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
Suite(test=dictGetType_granted_via_role, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -1319,13 +1319,12 @@ def cluster_tests(self, cluster, node=None):
"""
self.context.cluster_name = cluster
tasks = []
with Pool(3) as pool:
try:
for suite in loads(current_module(), Suite):
run_scenario(pool, tasks, Suite(test=suite))
Suite(test=suite, parallel=True, executor=pool)
finally:
join(tasks)
join()
@TestFeature
@Requirements(
@ -1340,13 +1339,12 @@ def feature(self, node="clickhouse1"):
self.context.node2 = self.context.cluster.node("clickhouse2")
self.context.node3 = self.context.cluster.node("clickhouse3")
tasks = []
with Pool(3) as pool:
try:
run_scenario(pool, tasks, Feature(test=cluster_tests))
run_scenario(pool, tasks, Scenario(test=local_user))
run_scenario(pool, tasks, Scenario(test=multiple_node_user))
Feature(test=cluster_tests, parallel=True, executor=pool)
Scenario(test=local_user, parallel=True, executor=pool)
Scenario(test=multiple_node_user, parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -6,93 +6,92 @@ from rbac.helper.common import *
@Name("privileges")
def feature(self):
tasks = []
with Pool(10) as pool:
try:
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.insert", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.select", "feature"), ), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.public_tables", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.distributed_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.grant_option", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.truncate", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.optimize", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.kill_query", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.kill_mutation", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.role_admin", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.dictGet", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.introspection", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.sources", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.admin_option", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.all_role", "feature")), {})
Feature(test=load("rbac.tests.privileges.insert", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.select", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.public_tables", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.distributed_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.grant_option", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.truncate", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.optimize", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.kill_query", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.kill_mutation", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.role_admin", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.dictGet", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.introspection", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.sources", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.admin_option", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.all_role", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_tables", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_dictionaries", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_databases", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_columns", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_users", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_roles", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_quotas", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_settings_profiles", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.show.show_row_policies", "feature")), {})
Feature(test=load("rbac.tests.privileges.show.show_tables", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_dictionaries", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_databases", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_columns", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_users", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_roles", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_quotas", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_settings_profiles", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.show.show_row_policies", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_column", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_index", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_constraint", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_ttl", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_settings", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_update", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_delete", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_freeze", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_fetch", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_move", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_user", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_role", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_row_policy", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_quota", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.alter.alter_settings_profile", "feature")), {})
Feature(test=load("rbac.tests.privileges.alter.alter_column", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_index", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_constraint", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_ttl", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_settings", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_update", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_delete", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_freeze", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_fetch", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_move", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_user", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_role", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_row_policy", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_quota", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.alter.alter_settings_profile", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_database", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_dictionary", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_temp_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_user", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_role", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_row_policy", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_quota", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.create.create_settings_profile", "feature")), {})
Feature(test=load("rbac.tests.privileges.create.create_database", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_dictionary", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_temp_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_user", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_role", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_row_policy", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_quota", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.create.create_settings_profile", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.attach.attach_database", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.attach.attach_dictionary", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.attach.attach_temp_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.attach.attach_table", "feature")), {})
Feature(test=load("rbac.tests.privileges.attach.attach_database", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.attach.attach_dictionary", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.attach.attach_temp_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.attach.attach_table", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_database", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_dictionary", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_user", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_role", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_row_policy", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_quota", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.drop.drop_settings_profile", "feature")), {})
Feature(test=load("rbac.tests.privileges.drop.drop_database", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_dictionary", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_user", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_role", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_row_policy", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_quota", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.drop.drop_settings_profile", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.detach.detach_database", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.detach.detach_dictionary", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.detach.detach_table", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.detach.detach_view", "feature")), {})
Feature(test=load("rbac.tests.privileges.detach.detach_database", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.detach.detach_dictionary", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.detach.detach_table", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.detach.detach_view", "feature"), parallel=True, executor=pool)
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.drop_cache", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.reload", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.flush", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.merges", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.moves", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.replication_queues", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.ttl_merges", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.restart_replica", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.sends", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.sync_replica", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.privileges.system.fetches", "feature")), {})
Feature(test=load("rbac.tests.privileges.system.drop_cache", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.reload", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.flush", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.merges", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.moves", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.replication_queues", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.ttl_merges", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.restart_replica", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.sends", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.sync_replica", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.privileges.system.fetches", "feature"), parallel=True, executor=pool)
finally:
join(tasks)
join()
Feature(test=load("rbac.tests.privileges.system.shutdown", "feature"))

View File

@ -117,18 +117,17 @@ def grant_option_check(grant_option_target, grant_target, user_name, table_type,
def feature(self, node="clickhouse1", stress=None, parallel=None):
"""Check the RBAC functionality of privileges with GRANT OPTION.
"""
args = {"table_type": "MergeTree", "privilege": privilege}
self.context.node = self.context.cluster.node(node)
if parallel is not None:
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
with Pool(12) as pool:
tasks = []
try:
for example in self.examples:
privilege, = example
run_scenario(pool, tasks, Suite(test=grant_option, name=privilege, setup=instrument_clickhouse_server_log), {"table_type": "MergeTree", "privilege": privilege})
Suite(test=grant_option, name=privilege, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -485,9 +485,11 @@ def role_with_privilege_on_cluster(self, table_type, node=None):
(key,) for key in table_types.keys()
])
@Name("insert")
def feature(self, table_type, parallel=None, stress=None, node="clickhouse1"):
def feature(self, table_type, stress=None, node="clickhouse1"):
"""Check the RBAC functionality of INSERT.
"""
args = {"table_type" : table_type}
self.context.node = self.context.cluster.node(node)
self.context.node1 = self.context.cluster.node("clickhouse1")
@ -496,13 +498,10 @@ def feature(self, table_type, parallel=None, stress=None, node="clickhouse1"):
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
with Pool(10) as pool:
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -408,20 +408,19 @@ def user_with_privilege_on_cluster(self, table_type, node=None):
(key,) for key in table_types.keys()
])
@Name("select")
def feature(self, table_type, parallel=None, stress=None, node="clickhouse1"):
def feature(self, table_type, stress=None, node="clickhouse1"):
"""Check the RBAC functionality of SELECT.
"""
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
args = {"table_type" : table_type}
with Pool(10) as pool:
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario, setup=instrument_clickhouse_server_log), {"table_type" : table_type})
Scenario(test=scenario, setup=instrument_clickhouse_server_log, parallel=True, executor=pool)(**args)
finally:
join(tasks)
join()

View File

@ -6,11 +6,10 @@ from rbac.helper.common import *
@Name("views")
def feature(self):
tasks = []
with Pool(3) as pool:
try:
run_scenario(pool, tasks, Feature(test=load("rbac.tests.views.view", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.views.live_view", "feature")), {})
run_scenario(pool, tasks, Feature(test=load("rbac.tests.views.materialized_view", "feature")), {})
Feature(test=load("rbac.tests.views.view", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.views.live_view", "feature"), parallel=True, executor=pool)
Feature(test=load("rbac.tests.views.materialized_view", "feature"), parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -1122,19 +1122,16 @@ def refresh_with_revoked_privilege(self, grant_target_name, user_name, node=None
RQ_SRS_006_RBAC_LiveView("1.0"),
)
@Name("live view")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
def feature(self, stress=None, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
with allow_experimental_live_view(self.context.node):
tasks = []
with Pool(3) as pool:
try:
for suite in loads(current_module(), Suite):
run_scenario(pool, tasks, suite)
Suite(test=suite, parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -2259,18 +2259,15 @@ def insert_on_target_table(self, grant_target_name, user_name, node=None):
RQ_SRS_006_RBAC_MaterializedView("1.0"),
)
@Name("materialized view")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
def feature(self, stress=None, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
with Pool(3) as pool:
try:
for suite in loads(current_module(), Suite):
run_scenario(pool, tasks, suite)
Suite(test=suite, parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -1141,18 +1141,15 @@ def drop_with_revoked_privilege(self, grant_target_name, user_name, node=None):
RQ_SRS_006_RBAC_View("1.0"),
)
@Name("view")
def feature(self, stress=None, parallel=None, node="clickhouse1"):
def feature(self, stress=None, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.stress = parallel
tasks = []
with Pool(3) as pool:
try:
for suite in loads(current_module(), Suite):
run_scenario(pool, tasks, suite)
Suite(test=suite, parallel=True, executor=pool)
finally:
join(tasks)
join()

View File

@ -19,8 +19,8 @@ def regression(self, local, clickhouse_binary_path, stress=None):
with Pool(8) as pool:
try:
Feature(test=load("example.regression", "regression"), parallel=True, executor=pool)(**args)
# run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
# run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
# Feature(test=load("ldap.regression", "regression"), parallel=True, executor=pool)(**args)
# Feature(test=load("rbac.regression", "regression"), parallel=True, executor=pool)(**args)
Feature(test=load("aes_encryption.regression", "regression"), parallel=True, executor=pool)(**args)
# Feature(test=load("map_type.regression", "regression"), parallel=True, executor=pool)(**args)
Feature(test=load("window_functions.regression", "regression"), parallel=True, executor=pool)(**args)