diff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py
index 8511f51300e1..c5f921381139 100644
--- a/ibis/backends/pyspark/__init__.py
+++ b/ibis/backends/pyspark/__init__.py
@@ -456,6 +456,8 @@ def _register_in_memory_table(self, op: ops.InMemoryTable) -> None:
 
     def _finalize_memtable(self, name: str) -> None:
         """No-op, otherwise a deadlock can occur when using Spark Connect."""
+        if isinstance(session := self._session, pyspark.sql.SparkSession):
+            session.catalog.dropTempView(name)
 
     @contextlib.contextmanager
     def _safe_raw_sql(self, query: str) -> Any:
diff --git a/ibis/backends/tests/test_client.py b/ibis/backends/tests/test_client.py
index f701f7db3f03..7d1ae751e58b 100644
--- a/ibis/backends/tests/test_client.py
+++ b/ibis/backends/tests/test_client.py
@@ -35,6 +35,7 @@
     PyODBCProgrammingError,
     SnowflakeProgrammingError,
 )
+from ibis.conftest import IS_SPARK_REMOTE
 from ibis.util import gen_name
 
 if TYPE_CHECKING:
@@ -1688,8 +1689,9 @@ def test_insert_into_table_missing_columns(con, temp_table):
 )
 @pytest.mark.notyet(
     ["pyspark"],
+    condition=IS_SPARK_REMOTE,
     raises=AssertionError,
-    reason="likely, but not guaranteed deadlock when using spark connect",
+    reason="likely deadlock when using spark connect",
 )
 def test_memtable_cleanup(con):
     name = ibis.util.gen_name("temp_memtable")
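
Note (not part of the patch): a minimal sketch of the guard added to _finalize_memtable, assuming self._session holds either a classic JVM-backed pyspark.sql.SparkSession or a Spark Connect session (which is a different class and fails the isinstance check). The walrus expression both tests and binds the session, so catalog.dropTempView is only issued for in-process sessions, where it cannot hit the Spark Connect deadlock the docstring mentions. The PySparkBackendStub class below is hypothetical, standing in for the real backend.

    import pyspark.sql

    class PySparkBackendStub:
        def __init__(self, session):
            self._session = session

        def _finalize_memtable(self, name: str) -> None:
            # Only clean up for a classic SparkSession; under Spark Connect
            # this remains a no-op, since dropping the view from a finalizer
            # can deadlock (the reason the method was a pure no-op before).
            if isinstance(session := self._session, pyspark.sql.SparkSession):
                # dropTempView returns True if the view existed and False
                # otherwise; either outcome is fine during finalization.
                session.catalog.dropTempView(name)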