You pass the name of your custom loss function in super().__init__, and in the second subclass you declare that name in _SUPPORTED_LOSS. Because _check_params raises a ValueError for any loss it does not recognize, you must either override that method or handle the exception it raises:

from abc import ABCMeta, abstractmethod

from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble._gb import BaseGradientBoosting  # private API; path varies by version
from sklearn.utils.validation import _deprecate_positional_args


class my_base_gradient_boost(BaseGradientBoosting, metaclass=ABCMeta):
    @abstractmethod
    def __init__(self, *args):
        # Forward everything to the parent, forcing the custom loss name.
        super().__init__(*args, loss='my_custom_loss')

    def _check_params(self):
        try:
            super()._check_params()
        except ValueError as e:
            # Swallow only the "unsupported loss" error and install the
            # custom loss object; re-raise everything else. Matching on the
            # message text is brittle across scikit-learn versions.
            if str(e) == "Loss 'my_custom_loss' not supported. ":
                self.loss_ = self.my_custom_loss
            else:
                raise
class my_classifier(my_base_gradient_boost, GradientBoostingClassifier):
    # Note the trailing comma: without it this would be a plain string,
    # not a one-element tuple.
    _SUPPORTED_LOSS = ('my_custom_loss',)

    @_deprecate_positional_args
    def __init__(self, *args):
        super().__init__(*args)
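For self.my_custom_loss to be usable as self.loss_, it must be an object implementing scikit-learn's internal loss interface. Below is a minimal sketch of such an object, assuming the private sklearn.ensemble._gb_losses module (an internal API that was reorganized in later releases) and plain least-squares behaviour for brevity; MyCustomLoss is an illustrative name, not part of the original answer, and a real classification loss would subclass ClassificationLossFunction and also implement _raw_prediction_to_proba and _raw_prediction_to_decision:

import numpy as np
from sklearn.dummy import DummyRegressor
from sklearn.ensemble._gb_losses import RegressionLossFunction  # private API


class MyCustomLoss(RegressionLossFunction):
    # Implements the abstract methods BaseGradientBoosting calls.

    def init_estimator(self):
        # Initial raw prediction: the mean of y.
        return DummyRegressor(strategy='mean')

    def __call__(self, y, raw_predictions, sample_weight=None):
        # Value of the loss (least squares here).
        return np.mean((y - raw_predictions.ravel()) ** 2)

    def negative_gradient(self, y, raw_predictions, **kwargs):
        # Pseudo-residuals fitted by each new tree.
        return y - raw_predictions.ravel()

    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, raw_predictions, sample_weight):
        # Least-squares leaves already hold the optimal value.
        pass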
A simpler variant subclasses GradientBoostingRegressor directly, keeps the standard loss, and only adds a penalised error computation around fit:

import numpy as np
from sklearn.ensemble import GradientBoostingRegressor


class CustomGradientBoostingRegressor(GradientBoostingRegressor):
    def __init__(self, penalty_factor=5, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.penalty_factor = penalty_factor
    def fit(self, X, y, sample_weight=None):
        super().fit(X, y, sample_weight=sample_weight)
        # Diagnostic only: absolute training error, inflated by
        # penalty_factor where y_true is zero. As written this does not
        # change the loss the booster optimises; to actually penalise
        # those samples during training, pass correspondingly larger
        # sample_weight values into super().fit instead.
        y = np.asarray(y)
        loss = np.abs(y - self.predict(X))
        loss[y == 0] *= self.penalty_factor
        self.penalized_train_loss_ = loss  # hypothetical attribute, kept for inspection
        return self
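A quick usage sketch; make_regression and the zeroed-out targets are illustrative assumptions, not part of the original code:

import numpy as np
from sklearn.datasets import make_regression

X, y = make_regression(n_samples=200, n_features=5, random_state=0)
y[:20] = 0  # ensure some zero targets so the penalty branch is exercised

model = CustomGradientBoostingRegressor(penalty_factor=5, n_estimators=50)
model.fit(X, y)
print(model.penalized_train_loss_[:5])

Because __init__ uses *args/**kwargs, this subclass will not survive scikit-learn's get_params/clone machinery (e.g. inside GridSearchCV); list the parameters explicitly if you need that.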