zhengruifeng commented on code in PR #48415:
URL: https://github.com/apache/spark/pull/48415#discussion_r1796571907
##########
python/pyspark/sql/tests/plot/test_frame_plot_plotly.py:
##########
@@ -48,79 +48,174 @@ def sdf3(self):
columns = ["sales", "signups", "visits", "date"]
return self.spark.createDataFrame(data, columns)
- def _check_fig_data(self, kind, fig_data, expected_x, expected_y,
expected_name=""):
- if kind == "line":
- self.assertEqual(fig_data["mode"], "lines")
- self.assertEqual(fig_data["type"], "scatter")
- elif kind == "bar":
- self.assertEqual(fig_data["type"], "bar")
- elif kind == "barh":
- self.assertEqual(fig_data["type"], "bar")
- self.assertEqual(fig_data["orientation"], "h")
- elif kind == "scatter":
- self.assertEqual(fig_data["type"], "scatter")
- self.assertEqual(fig_data["orientation"], "v")
- self.assertEqual(fig_data["mode"], "markers")
- elif kind == "area":
- self.assertEqual(fig_data["type"], "scatter")
- self.assertEqual(fig_data["orientation"], "v")
- self.assertEqual(fig_data["mode"], "lines")
- elif kind == "pie":
- self.assertEqual(fig_data["type"], "pie")
- self.assertEqual(list(fig_data["labels"]), expected_x)
- self.assertEqual(list(fig_data["values"]), expected_y)
- return
-
- self.assertEqual(fig_data["xaxis"], "x")
- self.assertEqual(list(fig_data["x"]), expected_x)
- self.assertEqual(fig_data["yaxis"], "y")
- self.assertEqual(list(fig_data["y"]), expected_y)
- self.assertEqual(fig_data["name"], expected_name)
+ def _check_fig_data(self, fig_data, **kwargs):
+ for key, expected_value in kwargs.items():
+ if key in ["x", "y", "labels", "values"]:
+ converted_values = [v.item() if hasattr(v, "item") else v for
v in fig_data[key]]
Review Comment:
we can use it with `has_numpy`
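
A minimal sketch of what this suggestion could look like, assuming `has_numpy` refers to a module-level flag recording whether numpy is importable; the exact flag name, where it lives in the PySpark test utilities, and the `_convert_plot_values` helper name are assumptions for illustration only:

```python
# Hedged sketch only: gate the scalar conversion on a numpy-availability flag
# instead of duck-typing on `item`. `has_numpy` is defined locally here for
# illustration; in the test suite it could come from a shared utility module.
try:
    import numpy as np

    has_numpy = True
except ImportError:
    has_numpy = False


def _convert_plot_values(values):
    # Convert numpy scalars (e.g. np.int64, np.float64) to plain Python
    # scalars so they compare equal to the expected literals in the tests.
    if has_numpy:
        return [v.item() if isinstance(v, np.generic) else v for v in values]
    return list(values)
```

With a flag like this, the loop in `_check_fig_data` could call `converted_values = _convert_plot_values(fig_data[key])`, and the `hasattr(v, "item")` duck-typing check would no longer be needed.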
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]