
Commit a69c5ea

[SPARK-49567][PYTHON] Use classic instead of vanilla from PySpark code base
### What changes were proposed in this pull request?

This PR proposes to use "classic" instead of "vanilla" throughout the PySpark code base.

### Why are the changes needed?

To unify the terminology and avoid confusion across the code base, including the docs.

### Does this PR introduce _any_ user-facing change?

No API changes, but the user-facing `pyspark.sql.DataFrame.offset` docs now use the term `classic` instead of `vanilla`.

### How was this patch tested?

CI

### Was this patch authored or co-authored using generative AI tooling?

No

Closes #48044 from itholic/vanilla_classic.

Authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
Parent commit: b0c5642

6 files changed: 8 additions, 8 deletions

python/pyspark/sql/connect/dataframe.py

Lines changed: 1 addition & 1 deletion
@@ -1783,7 +1783,7 @@ def __getitem__(
                 )
             )
         else:
-            # TODO: revisit vanilla Spark's Dataset.col
+            # TODO: revisit classic Spark's Dataset.col
             # if (sparkSession.sessionState.conf.supportQuotedRegexColumnName) {
             #   colRegex(colName)
             # } else {
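For context, the commented-out logic mirrors what classic Spark's `Dataset.col` does on the JVM side: when quoted regex column names are supported, it delegates to `colRegex`. A minimal sketch of the equivalent user-facing behavior in PySpark (the session setup and column names here are illustrative):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([("a", 1, True)], ["col_a", "col_b", "other"])

# colRegex selects every column whose name matches the backquoted
# pattern, rather than a single literal column name.
df.select(df.colRegex("`col_.+`")).show()  # selects col_a and col_b
```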

python/pyspark/sql/connect/group.py

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@ def __init__(

     def __repr__(self) -> str:
         # the expressions are not resolved here,
-        # so the string representation can be different from vanilla PySpark.
+        # so the string representation can be different from classic PySpark.
         grouping_str = ", ".join(str(e._expr) for e in self._grouping_cols)
         grouping_str = f"grouping expressions: [{grouping_str}]"
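Since Spark Connect builds this string from unresolved expressions, the printed form of a grouped object can differ between the two backends. A quick way to observe it (data and names are illustrative; the exact output depends on the backend):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([("a", 1), ("a", 2)], ["key", "value"])

# The repr is built differently in classic PySpark and Spark Connect,
# so the two strings need not match.
print(repr(df.groupBy("key")))
```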

python/pyspark/sql/dataframe.py

Lines changed: 1 addition & 1 deletion
@@ -1332,7 +1332,7 @@ def offset(self, num: int) -> "DataFrame":
         .. versionadded:: 3.4.0

         .. versionchanged:: 3.5.0
-            Supports vanilla PySpark.
+            Supports classic PySpark.

         Parameters
         ----------
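For reference, `DataFrame.offset(num)` skips the first `num` rows of the result; since 3.5.0 it is supported in classic PySpark as well as Spark Connect. A minimal usage sketch (the data is illustrative):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([(1,), (2,), (3,)], ["id"])

# Skip the first row of the result.
df.offset(1).show()  # leaves the rows with id 2 and 3
```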

python/pyspark/sql/tests/connect/streaming/test_parity_listener.py

Lines changed: 1 addition & 1 deletion
@@ -176,7 +176,7 @@ def test_slow_query(self):

     def test_listener_throw(self):
         """
-        Following Vanilla Spark's behavior, when the callback of user-defined listener throws,
+        Following classic Spark's behavior, when the callback of user-defined listener throws,
         other listeners should still proceed.
         """
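The behavior under test: if one registered listener's callback raises, the remaining listeners should still receive events. A hedged sketch of the kind of listeners involved (class names and the recording logic are illustrative, not the actual test code):

```python
from pyspark.sql.streaming import StreamingQueryListener

class ThrowingListener(StreamingQueryListener):
    def onQueryStarted(self, event):
        raise RuntimeError("boom")  # must not block other listeners

    def onQueryProgress(self, event):
        pass

    def onQueryTerminated(self, event):
        pass

class RecordingListener(StreamingQueryListener):
    def __init__(self):
        self.started = []

    def onQueryStarted(self, event):
        # Should still fire even though ThrowingListener raised.
        self.started.append(event.id)

    def onQueryProgress(self, event):
        pass

    def onQueryTerminated(self, event):
        pass

# Registration on an active session would look like:
# spark.streams.addListener(ThrowingListener())
# spark.streams.addListener(RecordingListener())
```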

python/pyspark/sql/tests/connect/test_connect_function.py

Lines changed: 3 additions & 3 deletions
@@ -2572,7 +2572,7 @@ def test_function_parity(self):

         cf_fn = {name for (name, value) in getmembers(CF, isfunction) if name[0] != "_"}

-        # Functions in vanilla PySpark we do not expect to be available in Spark Connect
+        # Functions in classic PySpark we do not expect to be available in Spark Connect
         sf_excluded_fn = set()

         self.assertEqual(
@@ -2581,15 +2581,15 @@ def test_function_parity(self):
             "Missing functions in Spark Connect not as expected",
         )

-        # Functions in Spark Connect we do not expect to be available in vanilla PySpark
+        # Functions in Spark Connect we do not expect to be available in classic PySpark
         cf_excluded_fn = {
             "check_dependencies",  # internal helper function
         }

         self.assertEqual(
             cf_fn - sf_fn,
             cf_excluded_fn,
-            "Missing functions in vanilla PySpark not as expected",
+            "Missing functions in classic PySpark not as expected",
         )

         # SPARK-45216: Fix non-deterministic seeded Dataset APIs
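For illustration, the parity check boils down to comparing the public function names exposed by the two modules via `inspect.getmembers`. A standalone sketch of the same idea (the `SF`/`CF` aliases follow the test's convention; that `SF` aliases `pyspark.sql.functions` is an assumption about the surrounding test file):

```python
from inspect import getmembers, isfunction

import pyspark.sql.functions as SF
import pyspark.sql.connect.functions as CF

# Public (non-underscore) function names on each side.
sf_fn = {name for name, _ in getmembers(SF, isfunction) if name[0] != "_"}
cf_fn = {name for name, _ in getmembers(CF, isfunction) if name[0] != "_"}

# Any asymmetric difference is a parity gap to explain or fix.
print("In classic but not Connect:", sf_fn - cf_fn)
print("In Connect but not classic:", cf_fn - sf_fn)
```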

python/pyspark/testing/utils.py

Lines changed: 1 addition & 1 deletion
@@ -185,7 +185,7 @@ def setUpClass(cls):
     def tearDownClass(cls):
         cls.sc.stop()

-    def test_assert_vanilla_mode(self):
+    def test_assert_classic_mode(self):
         from pyspark.sql import is_remote

         self.assertFalse(is_remote())
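The assertion hinges on `pyspark.sql.is_remote`, which reports whether the current session goes through Spark Connect. A one-liner to check the mode interactively:

```python
from pyspark.sql import is_remote

# False under classic PySpark, True when running against Spark Connect.
print(is_remote())
```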
