Skip to content

Commit

Permalink
chore: clean up on non-spark-connect pyspark
Browse files Browse the repository at this point in the history
  • Loading branch information
cpcloud committed Sep 23, 2024
1 parent ce64ba6 commit fd03718
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 1 deletion.
2 changes: 2 additions & 0 deletions ibis/backends/pyspark/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,6 +456,8 @@ def _register_in_memory_table(self, op: ops.InMemoryTable) -> None:

def _finalize_memtable(self, name: str) -> None:
    """Drop the temporary view backing the memtable *name*.

    Cleanup only happens on a classic (non-Connect) session: the
    ``isinstance`` check matches ``pyspark.sql.SparkSession`` but not the
    Spark Connect session class, so under Spark Connect this is a no-op —
    dropping the view there can otherwise cause a deadlock.
    """
    # NOTE(review): relies on Spark Connect sessions NOT being instances of
    # pyspark.sql.SparkSession — confirm this holds for the pinned pyspark version.
    if isinstance(session := self._session, pyspark.sql.SparkSession):
        session.catalog.dropTempView(name)

@contextlib.contextmanager
def _safe_raw_sql(self, query: str) -> Any:
Expand Down
4 changes: 3 additions & 1 deletion ibis/backends/tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
PyODBCProgrammingError,
SnowflakeProgrammingError,
)
from ibis.conftest import IS_SPARK_REMOTE
from ibis.util import gen_name

if TYPE_CHECKING:
Expand Down Expand Up @@ -1688,8 +1689,9 @@ def test_insert_into_table_missing_columns(con, temp_table):
)
@pytest.mark.notyet(
["pyspark"],
condition=IS_SPARK_REMOTE,
raises=AssertionError,
reason="likely, but not guaranteed deadlock when using spark connect",
reason="likely deadlock when using spark connect",
)
def test_memtable_cleanup(con):
name = ibis.util.gen_name("temp_memtable")
Expand Down

0 comments on commit fd03718

Please sign in to comment.