
Commit 06a28df ("fixup")

1 parent 9f1f1bd

3 files changed (+30, -9 lines)

python/pyspark/sql/classic/dataframe.py

Lines changed: 4 additions & 4 deletions
@@ -1029,7 +1029,7 @@ def __getattr__(self, name: str) -> Column:
         return Column(jc)

     def __dir__(self) -> List[str]:
-        attrs = set(super().__dir__())
+        attrs = set(dir(DataFrame))
         attrs.update(filter(lambda s: s.isidentifier(), self.columns))
         return sorted(attrs)

@@ -1953,15 +1953,15 @@ def sampleBy(
 def _test() -> None:
     import doctest
     from pyspark.sql import SparkSession
-    import pyspark.sql.classic.dataframe
+    import pyspark.sql.dataframe

-    globs = pyspark.sql.classic.dataframe.__dict__.copy()
+    globs = pyspark.sql.dataframe.__dict__.copy()
     spark = (
         SparkSession.builder.master("local[4]").appName("sql.classic.dataframe tests").getOrCreate()
     )
     globs["spark"] = spark
     (failure_count, test_count) = doctest.testmod(
-        pyspark.sql.classic.dataframe,
+        pyspark.sql.dataframe,
         globs=globs,
         optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE | doctest.REPORT_NDIFF,
     )
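
A side note on the __dir__ change above (my commentary, not part of the commit): the completion set is now built from the attributes of the DataFrame class plus any column names that are valid Python identifiers, which is what lets df.age<TAB> style completion offer columns in interactive shells. A minimal sketch of the same pattern, using a hypothetical Frame class:

from typing import List


class Frame:
    """Toy stand-in for a DataFrame (illustration only, not the pyspark class)."""

    def __init__(self, columns: List[str]) -> None:
        self.columns = columns

    def __dir__(self) -> List[str]:
        # Start from the attributes defined on the class itself...
        attrs = set(dir(Frame))
        # ...then add column names that can double as attribute names.
        attrs.update(filter(lambda s: s.isidentifier(), self.columns))
        return sorted(attrs)


df = Frame(["age", "name", "age 1"])
print("age" in dir(df))    # True: valid identifier, offered for tab completion
print("age 1" in dir(df))  # False: not an identifier, filtered out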

python/pyspark/sql/connect/dataframe.py

Lines changed: 25 additions & 4 deletions
@@ -1659,6 +1659,22 @@ def sampleBy(
             session=self._session,
         )

+    def _ipython_key_completions_(self) -> List[str]:
+        """Returns the names of columns in this :class:`DataFrame`.
+
+        Examples
+        --------
+        >>> df = spark.createDataFrame([(2, "Alice"), (5, "Bob")], ["age", "name"])
+        >>> df._ipython_key_completions_()
+        ['age', 'name']
+
+        Would return illegal identifiers.
+        >>> df = spark.createDataFrame([(2, "Alice"), (5, "Bob")], ["age 1", "name?1"])
+        >>> df._ipython_key_completions_()
+        ['age 1', 'name?1']
+        """
+        return self.columns
+
     def __getattr__(self, name: str) -> "Column":
         if name in ["_jseq", "_jdf", "_jmap", "_jcols", "rdd", "toJSON"]:
             raise PySparkAttributeError(
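
Another aside (not from the commit): _ipython_key_completions_ is the hook IPython consults when completing inside square brackets, so df["<TAB> can suggest column names even when they are not valid identifiers, complementing __dir__ above. A small sketch with a hypothetical Frame class:

from typing import List


class Frame:
    """Toy object showing the IPython key-completion hook (illustration only)."""

    def __init__(self, columns: List[str]) -> None:
        self.columns = columns

    def __getitem__(self, name: str) -> str:
        return f"<column {name!r}>"

    def _ipython_key_completions_(self) -> List[str]:
        # IPython calls this to complete keys typed inside df["...].
        return self.columns


df = Frame(["age 1", "name?1"])
# In an IPython session, typing df["<TAB> would now suggest 'age 1' and 'name?1'.
print(df._ipython_key_completions_())  # ['age 1', 'name?1']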
@@ -1738,7 +1754,7 @@ def _col(self, name: str) -> Column:
         )

     def __dir__(self) -> List[str]:
-        attrs = set(super().__dir__())
+        attrs = set(dir(DataFrame))
         attrs.update(self.columns)
         return sorted(attrs)

@@ -2149,11 +2165,16 @@ def _test() -> None:
     import sys
     import doctest
     from pyspark.sql import SparkSession as PySparkSession
-    import pyspark.sql.connect.dataframe
+    import pyspark.sql.dataframe

     os.chdir(os.environ["SPARK_HOME"])

-    globs = pyspark.sql.connect.dataframe.__dict__.copy()
+    globs = pyspark.sql.dataframe.__dict__.copy()
+
+    del pyspark.sql.dataframe.DataFrame.toJSON.__doc__
+    del pyspark.sql.dataframe.DataFrame.rdd.__doc__
+    del pyspark.sql.dataframe.DataFrame.checkpoint.__doc__
+    del pyspark.sql.dataframe.DataFrame.localCheckpoint.__doc__

     globs["spark"] = (
         PySparkSession.builder.appName("sql.connect.dataframe tests")
@@ -2162,7 +2183,7 @@ def _test() -> None:
     )

     (failure_count, test_count) = doctest.testmod(
-        pyspark.sql.connect.dataframe,
+        pyspark.sql.dataframe,
         globs=globs,
         optionflags=doctest.ELLIPSIS
         | doctest.NORMALIZE_WHITESPACE
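
A note on the _test() changes (my reading, not part of the commit): doctest.testmod gathers examples from docstrings in the module it is given, so pointing the Connect test runner at the shared pyspark.sql.dataframe module makes every documented example run under Spark Connect; deleting __doc__ on methods such as toJSON and rdd is how examples for unsupported APIs are dropped from that run. A self-contained sketch of the mechanism, with made-up function names:

import doctest


def supported() -> int:
    """
    >>> supported()
    1
    """
    return 1


def unsupported() -> int:
    """
    >>> unsupported()   # would fail where this API is unavailable
    999
    """
    return 2


if __name__ == "__main__":
    # Dropping the docstring drops its examples from doctest collection,
    # mirroring the `del ... .__doc__` lines in the diff above.
    del unsupported.__doc__
    print(doctest.testmod())  # TestResults(failed=0, attempted=1)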

python/pyspark/sql/dataframe.py

Lines changed: 1 addition & 1 deletion
@@ -5295,7 +5295,7 @@ def _ipython_key_completions_(self) -> List[str]:
         >>> df._ipython_key_completions_()
         ['age 1', 'name?1']
         """
-        return self.columns
+        ...

     @dispatch_df_method
     def withColumns(self, *colsMap: Dict[str, Column]) -> "DataFrame":
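
A closing observation (my interpretation, not part of the commit): the body of _ipython_key_completions_ in the shared pyspark.sql.dataframe.DataFrame becomes a bare `...` because that class now serves as the documented API surface whose doctests exercise both backends, while the classic and connect classes keep the real `return self.columns`; @dispatch_df_method appears to forward calls to whichever implementation is active. A generic sketch of that split, with hypothetical names:

from typing import List


class BaseFrame:
    """Shared API surface: docstrings (and their doctests) live here, bodies are stubs."""

    def _ipython_key_completions_(self) -> List[str]:
        """Return the column names of this frame.

        >>> f = ClassicFrame(["age", "name"])
        >>> f._ipython_key_completions_()
        ['age', 'name']
        """
        ...


class ClassicFrame(BaseFrame):
    """One concrete backend: supplies the real implementation."""

    def __init__(self, columns: List[str]) -> None:
        self.columns = columns

    def _ipython_key_completions_(self) -> List[str]:
        return self.columns


if __name__ == "__main__":
    import doctest
    # The examples documented on BaseFrame exercise the concrete backend.
    print(doctest.testmod())  # TestResults(failed=0, attempted=2)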
