Skip to content

Commit

Permalink
test(fixtures): ensure that most connections are cleaned up when the session ends (#10693)
Browse files Browse the repository at this point in the history
  • Loading branch information
cpcloud authored Jan 22, 2025
1 parent febd7d6 commit b9bd2a8
Show file tree
Hide file tree
Showing 20 changed files with 130 additions and 49 deletions.
9 changes: 7 additions & 2 deletions ibis/backends/clickhouse/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,8 +178,13 @@ def add_catalog_and_schema(node):


@pytest.fixture(scope="session")
def con(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="session")
Expand Down
4 changes: 3 additions & 1 deletion ibis/backends/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,9 @@ def ddl_backend(request, data_dir, tmp_path_factory, worker_id):
@pytest.fixture(scope="session")
def ddl_con(ddl_backend):
"""Instance of Client, already connected to the db (if applies)."""
return ddl_backend.connection
connection = ddl_backend.connection
yield connection
connection.disconnect()


@pytest.fixture(
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/datafusion/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,13 @@ def add_catalog_and_schema(node):


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="session")
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/duckdb/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,8 +138,13 @@ def add_catalog_and_schema(node):


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="session")
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/mssql/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,5 +60,10 @@ def connect(*, tmpdir, worker_id, **kw):


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection
9 changes: 7 additions & 2 deletions ibis/backends/mysql/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,5 +75,10 @@ def connect(*, tmpdir, worker_id, **kw):


@pytest.fixture(scope="session")
def con(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection
9 changes: 7 additions & 2 deletions ibis/backends/oracle/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,13 @@ def format_table(name: str) -> str:


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


def init_oracle_database(
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/polars/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,13 @@ def assert_series_equal(cls, left, right, *args, **kwargs) -> None:


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="session")
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/postgres/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,8 +69,13 @@ def connect(*, tmpdir, worker_id, **kw):


@pytest.fixture(scope="session")
def con(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="module")
Expand Down
29 changes: 20 additions & 9 deletions ibis/backends/pyspark/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from ibis.backends.pyspark.datatypes import PySparkSchema
from ibis.backends.tests.base import BackendTest, ServiceBackendTest
from ibis.backends.tests.data import json_types, topk, win
from ibis.backends.tests.errors import PySparkConnectException
from ibis.conftest import IS_SPARK_REMOTE, SPARK_REMOTE

if TYPE_CHECKING:
Expand Down Expand Up @@ -378,11 +379,12 @@ def connect(*, tmpdir, worker_id, **kw):
return con

@pytest.fixture(scope="session")
def con_streaming(data_dir, tmp_path_factory, worker_id):
backend_test = TestConfForStreaming.load_data(
data_dir, tmp_path_factory, worker_id
)
return backend_test.connection
def backend_streaming(data_dir, tmp_path_factory, worker_id):
return TestConfForStreaming.load_data(data_dir, tmp_path_factory, worker_id)

@pytest.fixture(scope="session")
def con_streaming(backend_streaming):
return backend_streaming.connection

@pytest.fixture(autouse=True, scope="function")
def stop_active_jobs(con_streaming):
Expand All @@ -396,13 +398,22 @@ def write_to_memory(self, expr, table_name):
df = self._session.sql(expr.compile())
df.writeStream.format("memory").queryName(table_name).start()

def __del__(self):
if not SPARK_REMOTE:
try: # noqa: SIM105
self.connection.disconnect()
except (AttributeError, PySparkConnectException):
pass


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
backend_test = TestConf.load_data(data_dir, tmp_path_factory, worker_id)
con = backend_test.connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


return con
@pytest.fixture(scope="session")
def con(backend):
return backend.connection


class IbisWindow:
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/risingwave/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,13 @@ def connect(*, tmpdir, worker_id, port: int | None = None, **kw):


@pytest.fixture(scope="session")
def con(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


@pytest.fixture(scope="module")
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/snowflake/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,5 +215,10 @@ def connect(*, tmpdir, worker_id, **kw) -> BaseBackend:


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection
9 changes: 7 additions & 2 deletions ibis/backends/sqlite/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,5 +55,10 @@ def functional_alltypes(self) -> ir.Table:


@pytest.fixture(scope="session")
def con(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(data_dir, tmp_path_factory, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection
6 changes: 6 additions & 0 deletions ibis/backends/tests/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,12 @@ class BackendTest(abc.ABC):
tpc_absolute_tolerance: float | None = None
"Absolute tolerance for floating point comparisons with pytest.approx in TPC correctness tests."

def __del__(self):
try: # noqa: SIM105
self.connection.disconnect()
except AttributeError:
pass

@property
@abc.abstractmethod
def deps(self) -> Iterable[str]:
Expand Down
5 changes: 4 additions & 1 deletion ibis/backends/tests/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,16 @@
from pyspark.errors.exceptions.base import ParseException as PySparkParseException
from pyspark.errors.exceptions.base import PySparkValueError
from pyspark.errors.exceptions.base import PythonException as PySparkPythonException
from pyspark.errors.exceptions.connect import (
SparkConnectException as PySparkConnectException,
)
from pyspark.errors.exceptions.connect import (
SparkConnectGrpcException as PySparkConnectGrpcException,
)
except ImportError:
PySparkParseException = PySparkAnalysisException = PySparkArithmeticException = (
PySparkPythonException
) = PySparkConnectGrpcException = PySparkValueError = None
) = PySparkConnectException = PySparkConnectGrpcException = PySparkValueError = None

try:
from google.api_core.exceptions import BadRequest as GoogleBadRequest
Expand Down
11 changes: 5 additions & 6 deletions ibis/backends/tests/test_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,10 @@
GoogleBadRequest,
MySQLOperationalError,
PolarsComputeError,
PsycoPg2ArraySubscriptError,
PsycoPg2IndeterminateDatatype,
PsycoPg2InternalError,
PsycoPg2ProgrammingError,
PsycoPg2SyntaxError,
PsycoPgInvalidTextRepresentation,
PsycoPgSyntaxError,
Py4JJavaError,
PyAthenaDatabaseError,
Expand Down Expand Up @@ -1118,7 +1117,7 @@ def test_unnest_struct(con):


@builtin_array
@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
@pytest.mark.notimpl(["risingwave"], raises=PsycoPg2InternalError)
@pytest.mark.notimpl(
["trino"], reason="inserting maps into structs doesn't work", raises=TrinoUserError
Expand Down Expand Up @@ -1209,7 +1208,7 @@ def test_zip_null(con, fn):


@builtin_array
@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
@pytest.mark.notimpl(["risingwave"], raises=PsycoPg2ProgrammingError)
@pytest.mark.notimpl(["datafusion"], raises=Exception, reason="not yet supported")
@pytest.mark.notimpl(
Expand Down Expand Up @@ -1769,7 +1768,7 @@ def test_table_unnest_column_expr(backend):
@pytest.mark.notimpl(["datafusion", "polars"], raises=com.OperationNotDefinedError)
@pytest.mark.notimpl(["trino"], raises=TrinoUserError)
@pytest.mark.notimpl(["athena"], raises=PyAthenaOperationalError)
@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
@pytest.mark.notimpl(["risingwave"], raises=PsycoPg2ProgrammingError)
@pytest.mark.notyet(
["risingwave"], raises=PsycoPg2InternalError, reason="not supported in risingwave"
Expand Down Expand Up @@ -1890,7 +1889,7 @@ def test_array_agg_bool(con, data, agg, baseline_func):

@pytest.mark.notyet(
["postgres"],
raises=PsycoPg2ArraySubscriptError,
raises=PsycoPgInvalidTextRepresentation,
reason="all dimensions must match in size",
)
@pytest.mark.notimpl(["risingwave", "flink"], raises=com.OperationNotDefinedError)
Expand Down
4 changes: 2 additions & 2 deletions ibis/backends/tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
ImpalaHiveServer2Error,
OracleDatabaseError,
PsycoPg2InternalError,
PsycoPg2UndefinedObject,
PsycoPgUndefinedObject,
Py4JJavaError,
PyAthenaDatabaseError,
PyODBCProgrammingError,
Expand Down Expand Up @@ -725,7 +725,7 @@ def test_list_database_contents(con):
@pytest.mark.notyet(["databricks"], raises=DatabricksServerOperationError)
@pytest.mark.notyet(["bigquery"], raises=com.UnsupportedBackendType)
@pytest.mark.notyet(
["postgres"], raises=PsycoPg2UndefinedObject, reason="no unsigned int types"
["postgres"], raises=PsycoPgUndefinedObject, reason="no unsigned int types"
)
@pytest.mark.notyet(
["oracle"], raises=OracleDatabaseError, reason="no unsigned int types"
Expand Down
4 changes: 2 additions & 2 deletions ibis/backends/tests/test_generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
OracleDatabaseError,
PolarsInvalidOperationError,
PsycoPg2InternalError,
PsycoPg2SyntaxError,
PsycoPgSyntaxError,
Py4JJavaError,
PyAthenaDatabaseError,
PyAthenaOperationalError,
Expand Down Expand Up @@ -1735,7 +1735,7 @@ def hash_256(col):
pytest.mark.notimpl(["flink"], raises=Py4JJavaError),
pytest.mark.notimpl(["druid"], raises=PyDruidProgrammingError),
pytest.mark.notimpl(["oracle"], raises=OracleDatabaseError),
pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError),
pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError),
pytest.mark.notimpl(["risingwave"], raises=PsycoPg2InternalError),
pytest.mark.notimpl(["snowflake"], raises=AssertionError),
pytest.mark.never(
Expand Down
8 changes: 4 additions & 4 deletions ibis/backends/tests/test_struct.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
DatabricksServerOperationError,
PolarsColumnNotFoundError,
PsycoPg2InternalError,
PsycoPg2SyntaxError,
PsycoPgSyntaxError,
Py4JJavaError,
PyAthenaDatabaseError,
PyAthenaOperationalError,
Expand Down Expand Up @@ -138,7 +138,7 @@ def test_collect_into_struct(alltypes):


@pytest.mark.notimpl(
["postgres"], reason="struct literals not implemented", raises=PsycoPg2SyntaxError
["postgres"], reason="struct literals not implemented", raises=PsycoPgSyntaxError
)
@pytest.mark.notimpl(
["risingwave"],
Expand All @@ -155,7 +155,7 @@ def test_field_access_after_case(con):


@pytest.mark.notimpl(
["postgres"], reason="struct literals not implemented", raises=PsycoPg2SyntaxError
["postgres"], reason="struct literals not implemented", raises=PsycoPgSyntaxError
)
@pytest.mark.notimpl(["flink"], raises=IbisError, reason="not implemented in ibis")
@pytest.mark.parametrize(
Expand Down Expand Up @@ -242,7 +242,7 @@ def test_keyword_fields(con, nullable):

@pytest.mark.notyet(
["postgres"],
raises=PsycoPg2SyntaxError,
raises=PsycoPgSyntaxError,
reason="sqlglot doesn't implement structs for postgres correctly",
)
@pytest.mark.notyet(
Expand Down
9 changes: 7 additions & 2 deletions ibis/backends/trino/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,8 +162,13 @@ def awards_players(self):


@pytest.fixture(scope="session")
def con(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
def backend(tmp_path_factory, data_dir, worker_id):
return TestConf.load_data(data_dir, tmp_path_factory, worker_id)


@pytest.fixture(scope="session")
def con(backend):
return backend.connection


def generate_tpc_tables(suite_name, *, data_dir):
Expand Down

0 comments on commit b9bd2a8

Please sign in to comment.