Revert "[SPARK-49531][PYTHON][CONNECT] Support line plot with plotly …
Browse files Browse the repository at this point in the history
…backend"

This reverts commit 3b8ddda.
dongjoon-hyun committed Sep 15, 2024
1 parent 017b0ea commit fa6a078
Showing 16 changed files with 0 additions and 514 deletions.
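
For context, the reverted change added a DataFrame.plot accessor backed by plotly. A minimal usage sketch, adapted from the docstring removed below (it assumes a running SparkSession bound to `spark` and the plotly package installed; the figure-returning behavior is an assumption, since the deleted backend code is not shown in this view):

    data = [("A", 10, 1.5), ("B", 30, 2.5), ("C", 20, 3.5)]
    columns = ["category", "int_val", "float_val"]
    df = spark.createDataFrame(data, columns)
    # With the reverted commit applied, df.plot returned a PySparkPlotAccessor,
    # and df.plot.line(...) produced a plotly figure.
    fig = df.plot.line(x="category", y=["int_val", "float_val"])
    fig.show()

After this revert, df.plot is no longer available on either the classic or the Spark Connect DataFrame, as the file-by-file changes below show.
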
4 changes: 0 additions & 4 deletions dev/sparktestsupport/modules.py
@@ -548,8 +548,6 @@ def __hash__(self):
         "pyspark.sql.tests.test_udtf",
         "pyspark.sql.tests.test_utils",
         "pyspark.sql.tests.test_resources",
-        "pyspark.sql.tests.plot.test_frame_plot",
-        "pyspark.sql.tests.plot.test_frame_plot_plotly",
     ],
 )

@@ -1053,8 +1051,6 @@ def __hash__(self):
         "pyspark.sql.tests.connect.test_parity_arrow_cogrouped_map",
         "pyspark.sql.tests.connect.test_parity_python_datasource",
         "pyspark.sql.tests.connect.test_parity_python_streaming_datasource",
-        "pyspark.sql.tests.connect.test_parity_frame_plot",
-        "pyspark.sql.tests.connect.test_parity_frame_plot_plotly",
         "pyspark.sql.tests.connect.test_utils",
         "pyspark.sql.tests.connect.client.test_artifact",
         "pyspark.sql.tests.connect.client.test_artifact_localcluster",
5 changes: 0 additions & 5 deletions python/pyspark/errors/error-conditions.json
@@ -1088,11 +1088,6 @@
       "Function `<func_name>` should use only POSITIONAL or POSITIONAL OR KEYWORD arguments."
     ]
   },
-  "UNSUPPORTED_PLOT_BACKEND": {
-    "message": [
-      "`<backend>` is not supported, it should be one of the values from <supported_backends>"
-    ]
-  },
   "UNSUPPORTED_SIGNATURE": {
     "message": [
       "Unsupported signature: <signature>."
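
The removed error condition above validated the plotting backend; the module that raised it (python/pyspark/sql/plot/core.py, deleted below) is not reproduced in this view. A hedged sketch of how such a check is typically written in PySpark — the function name _get_plot_backend and the call site are illustrative assumptions, not the deleted file's verbatim code, and errorClass is the assumed companion to the messageParameters keyword that appears elsewhere in this diff:

    from pyspark.errors import PySparkValueError

    def _get_plot_backend(backend: str = "plotly"):
        # Illustrative guard: only the plotly backend was supported.
        if backend != "plotly":
            raise PySparkValueError(
                errorClass="UNSUPPORTED_PLOT_BACKEND",
                messageParameters={"backend": backend, "supported_backends": "plotly"},
            )
        import plotly
        return plotly
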
5 changes: 0 additions & 5 deletions python/pyspark/sql/classic/dataframe.py
@@ -58,7 +58,6 @@
 from pyspark.sql.classic.column import _to_seq, _to_list, _to_java_column
 from pyspark.sql.readwriter import DataFrameWriter, DataFrameWriterV2
 from pyspark.sql.merge import MergeIntoWriter
-from pyspark.sql.plot import PySparkPlotAccessor
 from pyspark.sql.streaming import DataStreamWriter
 from pyspark.sql.types import (
     StructType,
@@ -1863,10 +1862,6 @@ def executionInfo(self) -> Optional["ExecutionInfo"]:
             messageParameters={"member": "queryExecution"},
         )
 
-    @property
-    def plot(self) -> PySparkPlotAccessor:
-        return PySparkPlotAccessor(self)
-
 
 class DataFrameNaFunctions(ParentDataFrameNaFunctions):
     def __init__(self, df: ParentDataFrame):
5 changes: 0 additions & 5 deletions python/pyspark/sql/connect/dataframe.py
@@ -83,7 +83,6 @@
     UnresolvedStar,
 )
 from pyspark.sql.connect.functions import builtin as F
-from pyspark.sql.plot import PySparkPlotAccessor
 from pyspark.sql.pandas.types import from_arrow_schema, to_arrow_schema
 from pyspark.sql.pandas.functions import _validate_pandas_udf  # type: ignore[attr-defined]

@@ -2240,10 +2239,6 @@ def rdd(self) -> "RDD[Row]":
     def executionInfo(self) -> Optional["ExecutionInfo"]:
         return self._execution_info
 
-    @property
-    def plot(self) -> PySparkPlotAccessor:
-        return PySparkPlotAccessor(self)
-
 
 class DataFrameNaFunctions(ParentDataFrameNaFunctions):
     def __init__(self, df: ParentDataFrame):
27 changes: 0 additions & 27 deletions python/pyspark/sql/dataframe.py
@@ -39,7 +39,6 @@
 from pyspark.sql.column import Column
 from pyspark.sql.readwriter import DataFrameWriter, DataFrameWriterV2
 from pyspark.sql.merge import MergeIntoWriter
-from pyspark.sql.plot import PySparkPlotAccessor
 from pyspark.sql.streaming import DataStreamWriter
 from pyspark.sql.types import StructType, Row
 from pyspark.sql.utils import dispatch_df_method
@@ -6395,32 +6394,6 @@ def executionInfo(self) -> Optional["ExecutionInfo"]:
         """
         ...
 
-    @property
-    def plot(self) -> PySparkPlotAccessor:
-        """
-        Returns a :class:`PySparkPlotAccessor` for plotting functions.
-
-        .. versionadded:: 4.0.0
-
-        Returns
-        -------
-        :class:`PySparkPlotAccessor`
-
-        Notes
-        -----
-        This API is experimental.
-
-        Examples
-        --------
-        >>> data = [("A", 10, 1.5), ("B", 30, 2.5), ("C", 20, 3.5)]
-        >>> columns = ["category", "int_val", "float_val"]
-        >>> df = spark.createDataFrame(data, columns)
-        >>> type(df.plot)
-        <class 'pyspark.sql.plot.core.PySparkPlotAccessor'>
-        >>> df.plot.line(x="category", y=["int_val", "float_val"])  # doctest: +SKIP
-        """
-        ...
-
 
 class DataFrameNaFunctions:
     """Functionality for working with missing data in :class:`DataFrame`.
21 changes: 0 additions & 21 deletions python/pyspark/sql/plot/__init__.py

This file was deleted.

135 changes: 0 additions & 135 deletions python/pyspark/sql/plot/core.py

This file was deleted.

30 changes: 0 additions & 30 deletions python/pyspark/sql/plot/plotly.py

This file was deleted.
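
The deleted plotly backend module's contents are not shown here. As a rough illustration only (not the deleted file's actual code), a minimal plotly line-plot helper for a PySpark DataFrame can look like this:

    import plotly.express as px
    from pyspark.sql import DataFrame

    def plot_line(sdf: DataFrame, x, y, **kwargs):
        # plotly operates on local data, so collect the Spark DataFrame first.
        pdf = sdf.toPandas()
        return px.line(pdf, x=x, y=y, **kwargs)

A production backend would also need to limit how much data is collected to the driver; this sketch collects everything and is only meant to show the shape of the plotly call.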

36 changes: 0 additions & 36 deletions python/pyspark/sql/tests/connect/test_parity_frame_plot.py

This file was deleted.

36 changes: 0 additions & 36 deletions python/pyspark/sql/tests/connect/test_parity_frame_plot_plotly.py

This file was deleted.

16 changes: 0 additions & 16 deletions python/pyspark/sql/tests/plot/__init__.py

This file was deleted.
