shingjan commented on code in PR #12144:
URL: https://github.com/apache/tvm/pull/12144#discussion_r926281877
##########
python/tvm/auto_scheduler/cost_model/xgb_model.py:
##########
@@ -539,125 +539,128 @@ def feval(preds, labels):
return feval
-def custom_callback(
- stopping_rounds,
- metric,
- fevals,
- evals=(),
- log_file=None,
- maximize=False,
- verbose_eval=True,
- skip_every=2,
-):
- """Callback function for xgboost to support multiple custom evaluation
functions"""
- # pylint: disable=import-outside-toplevel
- from xgboost.core import EarlyStopException
- from xgboost.callback import _fmt_metric
-
- try:
- from xgboost.training import aggcv
- except ImportError:
- from xgboost.callback import _aggcv as aggcv
-
- state = {}
- metric_shortname = metric.split("-")[1]
-
- def init(env):
- """internal function"""
- bst = env.model
-
- state["maximize_score"] = maximize
- state["best_iteration"] = 0
- if maximize:
- state["best_score"] = float("-inf")
- else:
- state["best_score"] = float("inf")
+class CustomCallback(callback.TrainingCallback):
Review Comment:
`TrainingCallback` may have a different implementation in older releases, so
backwards compatibility could be very useful here in case `xgboost` < 1.6.0 is
used. You can refer to the change [here](https://github.com/apache/tvm/pull/12141)
in meta schedule.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]