
Commit

quick fix #2
dannymeijer committed Nov 12, 2024
1 parent 1e645d8 commit abb435b
Showing 2 changed files with 5 additions and 7 deletions.
10 changes: 4 additions & 6 deletions src/koheesio/spark/utils/common.py
@@ -79,18 +79,16 @@ def get_spark_minor_version() -> float:
 
 def check_if_pyspark_connect_is_supported() -> bool:
     """Check if the current version of PySpark supports the connect module"""
-    result = False
-
     if SPARK_MINOR_VERSION >= 3.5:
         try:
             importlib.import_module("pyspark.sql.connect")
             from pyspark.sql.connect.column import Column
 
-            _col: Column
-            result = True
+            _col: Column  # type: ignore
+            return True
         except (ModuleNotFoundError, ImportError):
-            result = False
-    return result
+            return False
+    return False
 
 
 if check_if_pyspark_connect_is_supported():
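For context: the fix drops the intermediate result flag in favor of early returns and adds a # type: ignore to the annotation-only _col: Column statement, presumably to quiet a type-checker warning about the unused annotation. A minimal standalone sketch of the helper as it reads after this commit; the SPARK_MINOR_VERSION value below is stubbed for illustration (in the real module it sits alongside get_spark_minor_version()):

import importlib

# Stubbed here for the sketch; koheesio derives this from the installed PySpark version.
SPARK_MINOR_VERSION: float = 3.5


def check_if_pyspark_connect_is_supported() -> bool:
    """Check if the current version of PySpark supports the connect module"""
    if SPARK_MINOR_VERSION >= 3.5:
        try:
            # Both the module probe and the Column import must succeed.
            importlib.import_module("pyspark.sql.connect")
            from pyspark.sql.connect.column import Column

            _col: Column  # type: ignore
            return True
        except (ModuleNotFoundError, ImportError):
            return False
    return False

With early returns, each branch has exactly one exit and the function can no longer fall through with a stale result value.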
2 changes: 1 addition & 1 deletion tests/spark/test_spark_utils.py
@@ -32,7 +32,7 @@ def test_unhappy_get_active_session_spark_connect(self):
             patch("pyspark.sql.SparkSession.getActiveSession", return_value=None),
         ):
             session = MagicMock(SparkSession=MagicMock(getActiveSession=MagicMock(return_value=None)))
-            with patch.dict("sys.modules", {"pyspark.sql.connect": session}):
+            with patch.dict("sys.modules", {"pyspark.sql.connect.session": session}):
                 with pytest.raises(
                     RuntimeError,
                     match="No active Spark session found. Please create a Spark session before using module "
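The test change matters because patch.dict("sys.modules", ...) only intercepts imports whose fully qualified name matches a key in the mapping: patching pyspark.sql.connect does not stand in for code that imports pyspark.sql.connect.session, which is presumably the module the code under test resolves. A small, self-contained sketch of that technique, using a hypothetical module name (demo_pkg.session) and function rather than koheesio's actual code:

import importlib
from unittest.mock import MagicMock, patch


def read_active_session():
    # Hypothetical code under test: resolves a submodule at call time
    # and asks it for the active session.
    mod = importlib.import_module("demo_pkg.session")
    return mod.SparkSession.getActiveSession()


fake = MagicMock(SparkSession=MagicMock(getActiveSession=MagicMock(return_value=None)))

# The key must be the exact module name being imported; patching only
# "demo_pkg" would not satisfy the lookup of "demo_pkg.session".
with patch.dict("sys.modules", {"demo_pkg.session": fake}):
    assert read_active_session() is None

Because patch.dict restores sys.modules on exit, the fake module does not leak into other tests.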
