ClickHouse/tests/integration/test_s3_storage_conf_proxy/test.py
Arthur Passos 2bade7db08
Add global proxy setting (#51749)
2023-08-24 16:07:26 +03:00


import logging
import os
import time

import pytest

from helpers.cluster import ClickHouseCluster
import helpers.s3_url_proxy_tests_util as proxy_util


@pytest.fixture(scope="module")
def cluster():
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance(
            "node", main_configs=["configs/config.d/storage_conf.xml"], with_minio=True
        )
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")

        proxy_util.run_resolver(cluster, os.path.dirname(__file__))
        logging.info("Proxy resolver started")

        yield cluster
    finally:
        cluster.shutdown()
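
# proxy_util.run_resolver() above starts a small HTTP service inside the "resolver"
# container; the "s3_with_resolver" storage policy queries that service to learn which
# proxy host to use before talking to MinIO.  A minimal sketch of such a resolver is
# kept as a comment here (real code at module level would start a server on import).
# The framework, route and port below are assumptions; the authoritative script is the
# one shipped with the shared helper (helpers.s3_url_proxy_tests_util):
#
#     import random
#
#     import bottle
#
#     @bottle.route("/hostname")
#     def hostname():
#         # Hand out one of the two proxy containers at random.
#         return random.choice(["proxy1", "proxy2"])
#
#     bottle.run(host="0.0.0.0", port=8080)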


def check_proxy_logs(cluster, proxy_instance, http_methods={"POST", "PUT", "GET"}):
    for i in range(10):
        logs = cluster.get_container_logs(proxy_instance)
        # Retry until the proxy logs show at least one of the expected HTTP methods
        # being sent to Minio.
        for http_method in http_methods:
            if logs.find(http_method + " http://minio1") >= 0:
                return
        time.sleep(1)
    else:
        assert False, f"none of {http_methods} found in logs of {proxy_instance}"


@pytest.mark.parametrize("policy", ["s3", "s3_with_resolver"])
def test_s3_with_proxy_list(cluster, policy):
    node = cluster.instances["node"]

    node.query(
        """
        CREATE TABLE s3_test (
            id Int64,
            data String
        ) ENGINE=MergeTree()
        ORDER BY id
        SETTINGS storage_policy='{}'
        """.format(
            policy
        )
    )

    node.query("INSERT INTO s3_test VALUES (0,'data'),(1,'data')")
    assert (
        node.query("SELECT * FROM s3_test order by id FORMAT Values")
        == "(0,'data'),(1,'data')"
    )

    node.query("DROP TABLE IF EXISTS s3_test SYNC")

    # Every S3 request should have gone through a proxy; both proxies are expected
    # to have served at least part of the traffic.
    for proxy in ["proxy1", "proxy2"]:
        check_proxy_logs(cluster, proxy, ["PUT", "GET"])
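

# For reference, a minimal sketch of the storage configuration that the "s3" and
# "s3_with_resolver" policies used above could map to.  This is an illustration only:
# endpoints, credentials, ports and cache times are assumptions, and the authoritative
# version is configs/config.d/storage_conf.xml next to this test.
STORAGE_CONF_SKETCH = """
<clickhouse>
    <storage_configuration>
        <disks>
            <s3>
                <type>s3</type>
                <endpoint>http://minio1:9001/root/data/</endpoint>
                <access_key_id>minio</access_key_id>
                <secret_access_key>minio123</secret_access_key>
                <!-- Static proxy list: requests are spread across the two proxies. -->
                <proxy>
                    <uri>http://proxy1:8888</uri>
                    <uri>http://proxy2:8888</uri>
                </proxy>
            </s3>
            <s3_with_resolver>
                <type>s3</type>
                <endpoint>http://minio1:9001/root/data/</endpoint>
                <access_key_id>minio</access_key_id>
                <secret_access_key>minio123</secret_access_key>
                <!-- Remote resolver: ask the resolver service which proxy to use. -->
                <proxy>
                    <resolver>
                        <endpoint>http://resolver:8080/hostname</endpoint>
                        <proxy_scheme>http</proxy_scheme>
                        <proxy_port>8888</proxy_port>
                        <proxy_cache_time>10</proxy_cache_time>
                    </resolver>
                </proxy>
            </s3_with_resolver>
        </disks>
        <policies>
            <s3>
                <volumes>
                    <main>
                        <disk>s3</disk>
                    </main>
                </volumes>
            </s3>
            <s3_with_resolver>
                <volumes>
                    <main>
                        <disk>s3_with_resolver</disk>
                    </main>
                </volumes>
            </s3_with_resolver>
        </policies>
    </storage_configuration>
</clickhouse>
"""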