Fix Java error

This commit is contained in:
kssenii 2023-04-12 22:40:13 +02:00
parent 18723b1a44
commit 0c8d65b759
5 changed files with 29 additions and 17 deletions

View File

@ -17,6 +17,7 @@ import urllib.parse
import shlex
import urllib3
import requests
import pyspark
try:
# Please, add modules that required for specific tests only here.
@ -447,7 +448,16 @@ class ClickHouseCluster:
self.minio_redirect_ip = None
self.minio_redirect_port = 8080
self.spark_session = None
self.spark_session = (
pyspark.sql.SparkSession.builder.appName("spark_test")
.config(
"spark.jars.packages",
"org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0",
)
.master("local")
.getOrCreate()
.stop()
)
self.with_azurite = False

View File

@ -65,5 +65,19 @@
"test_server_reload/test.py::test_remove_http_port",
"test_server_reload/test.py::test_remove_mysql_port",
"test_server_reload/test.py::test_remove_postgresql_port",
"test_server_reload/test.py::test_remove_tcp_port"
"test_server_reload/test.py::test_remove_tcp_port",
"test_storage_hudi/test.py::test_single_hudi_file",
"test_storage_hudi/test.py::test_multiple_hudi_files",
"test_storage_hudi/test.py::test_types",
"test_storage_delta/test.py::test_single_log_file",
"test_storage_delta/test.py::test_partition_by",
"test_storage_delta/test.py::test_checkpoint",
"test_storage_delta/test.py::test_multiple_log_files",
"test_storage_delta/test.py::test_metadata",
"test_storage_delta/test.py::test_types",
"test_storage_iceberg/test.py::test_single_iceberg_file",
"test_storage_iceberg/test.py::test_partition_by",
"test_storage_iceberg/test.py::test_multiple_iceberg_files",
"test_storage_iceberg/test.py::test_types",
]

View File

@ -61,11 +61,7 @@ def started_cluster():
prepare_s3_bucket(cluster)
pyspark.sql.SparkSession.builder.appName("spark_test").config(
"spark.jars.packages",
"org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0",
).master("local").getOrCreate().stop()
pyspark.sql.SparkSession.builder.appName("spark_test").master("local").getOrCreate().stop()
cluster.spark_session = get_spark()
yield cluster

View File

@ -60,11 +60,7 @@ def started_cluster():
prepare_s3_bucket(cluster)
logging.info("S3 bucket created")
pyspark.sql.SparkSession.builder.appName("spark_test").config(
"spark.jars.packages",
"org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0",
).master("local").getOrCreate().stop()
pyspark.sql.SparkSession.builder.appName("spark_test").master("local").getOrCreate().stop()
cluster.spark_session = get_spark()
yield cluster

View File

@ -62,11 +62,7 @@ def started_cluster():
prepare_s3_bucket(cluster)
logging.info("S3 bucket created")
pyspark.sql.SparkSession.builder.appName("spark_test").config(
"spark.jars.packages",
"org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0",
).master("local").getOrCreate().stop()
pyspark.sql.SparkSession.builder.appName("spark_test").master("local").getOrCreate().stop()
cluster.spark_session = get_spark()
yield cluster