Commit b8a8e0d
[MINOR][PYTHON] Leverage functools.cached_property in SparkSession
### What changes were proposed in this pull request?

This PR proposes to replace manual cached property with `functools.cached_property` in SparkSession.

### Why are the changes needed?

To reduce code.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Existing tests.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #49306 from HyukjinKwon/use-cached-property.

Authored-by: Hyukjin Kwon <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
1 parent 789702b commit b8a8e0d
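
For readers unfamiliar with the pattern being replaced, here is a minimal, illustrative sketch of the change this commit applies. The `Session` and `Catalog` classes below are toy stand-ins, not the actual PySpark classes; the real diff follows.

```python
import functools


class Catalog:
    """Toy stand-in for an object that should be built once per session."""

    def __init__(self, session: "Session") -> None:
        self._session = session


class Session:
    # Before: a plain @property with hand-rolled caching via hasattr().
    @property
    def catalog_manual(self) -> Catalog:
        if not hasattr(self, "_catalog"):
            self._catalog = Catalog(self)
        return self._catalog

    # After: functools.cached_property runs the function body on first
    # access, stores the result in the instance's __dict__, and returns
    # the cached value on every later access.
    @functools.cached_property
    def catalog(self) -> Catalog:
        return Catalog(self)
```

Both variants return the same `Catalog` instance on repeated access (`s.catalog is s.catalog` is `True`); the decorated version just needs less code.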

File tree: 2 files changed (+10 −19 lines)

Diff for: python/pyspark/sql/connect/session.py

+2 −4

@@ -790,13 +790,11 @@ def range(
 
     range.__doc__ = PySparkSession.range.__doc__
 
-    @property
+    @functools.cached_property
     def catalog(self) -> "Catalog":
         from pyspark.sql.connect.catalog import Catalog
 
-        if not hasattr(self, "_catalog"):
-            self._catalog = Catalog(self)
-        return self._catalog
+        return Catalog(self)
 
     catalog.__doc__ = PySparkSession.catalog.__doc__
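
The decorator's behavior can be checked in isolation. Below is a small, self-contained sketch (a toy class, not `SparkSession`) showing that the decorated function body runs only once per instance, which is exactly what the removed `hasattr`-based code achieved by hand:

```python
import functools


class Demo:
    def __init__(self) -> None:
        self.calls = 0

    @functools.cached_property
    def value(self) -> int:
        # This body executes only on the first access for a given instance.
        self.calls += 1
        return 42


d = Demo()
assert d.value == 42
assert d.value == 42
assert d.calls == 1            # the property body ran exactly once
assert "value" in d.__dict__   # the result is cached on the instance
```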

Diff for: python/pyspark/sql/session.py

+8 −15

@@ -18,7 +18,7 @@
 import sys
 import warnings
 from collections.abc import Sized
-from functools import reduce
+from functools import reduce, cached_property
 from threading import RLock
 from types import TracebackType
 from typing import (
@@ -773,7 +773,7 @@ def sparkContext(self) -> "SparkContext":
         """
         return self._sc
 
-    @property
+    @cached_property
     def version(self) -> str:
         """
         The version of Spark on which this application is running.
@@ -794,7 +794,7 @@ def version(self) -> str:
         """
         return self._jsparkSession.version()
 
-    @property
+    @cached_property
     def conf(self) -> RuntimeConfig:
         """Runtime configuration interface for Spark.
 
@@ -822,11 +822,9 @@ def conf(self) -> RuntimeConfig:
         >>> spark.conf.get("key")
         'value'
         """
-        if not hasattr(self, "_conf"):
-            self._conf = RuntimeConfig(self._jsparkSession.conf())
-        return self._conf
+        return RuntimeConfig(self._jsparkSession.conf())
 
-    @property
+    @cached_property
     def catalog(self) -> "Catalog":
         """Interface through which the user may create, drop, alter or query underlying
         databases, tables, functions, etc.
@@ -854,9 +852,7 @@ def catalog(self) -> "Catalog":
         """
         from pyspark.sql.catalog import Catalog
 
-        if not hasattr(self, "_catalog"):
-            self._catalog = Catalog(self)
-        return self._catalog
+        return Catalog(self)
 
     @property
     def udf(self) -> "UDFRegistration":
@@ -1907,7 +1903,7 @@ def readStream(self) -> DataStreamReader:
         """
         return DataStreamReader(self)
 
-    @property
+    @cached_property
    def streams(self) -> "StreamingQueryManager":
         """Returns a :class:`StreamingQueryManager` that allows managing all the
         :class:`StreamingQuery` instances active on `this` context.
@@ -1941,10 +1937,7 @@ def streams(self) -> "StreamingQueryManager":
         """
         from pyspark.sql.streaming import StreamingQueryManager
 
-        if hasattr(self, "_sqm"):
-            return self._sqm
-        self._sqm: StreamingQueryManager = StreamingQueryManager(self._jsparkSession.streams())
-        return self._sqm
+        return StreamingQueryManager(self._jsparkSession.streams())
 
     @property
     def tvf(self) -> "TableValuedFunction":
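
One practical difference from the removed code: with `functools.cached_property` the cached object lives under the property's own name in the instance `__dict__` (rather than a private attribute such as `_catalog` or `_sqm`), and removing that entry forces recomputation on the next access. A minimal sketch with a toy class:

```python
import functools


class Holder:
    @functools.cached_property
    def resource(self) -> list:
        # Recomputed only when no cached entry exists on the instance.
        return []


h = Holder()
first = h.resource
assert h.resource is first         # cached: same object every time

del h.__dict__["resource"]         # drop the cached entry ...
assert h.resource is not first     # ... so the next access recomputes it
```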
