Python Catboost 未获取参数
我没有得到作为输出的参数,而是得到了如下类型的错误:TypeError: cv() got an unexpected keyword argument 'metrics'。正如文档所说,它应该接受这个参数,但我仍然在这里得到错误并卡住了。谁能告诉我哪里出了问题?错误发生在最后一行代码中,但下面给出了函数和类的完整代码,因为我不知道为什么会发生这种情况。我的代码在这里:
class ModelOptimizer:
    """Wraps an estimator and tunes its hyper-parameters with skopt's gp_minimize."""

    best_score = None  # best objective value found by the last optimize() run
    opt = None         # skopt OptimizeResult from the last optimize() run

    def __init__(self, model, X_train, y_train, categorical_columns_indices=None,
                 n_fold=3, seed=2405, early_stopping_rounds=30,
                 is_stratified=True, is_shuffle=True):
        self.model = model
        self.X_train = X_train
        self.y_train = y_train
        self.categorical_columns_indices = categorical_columns_indices
        self.n_fold = n_fold
        self.seed = seed
        self.early_stopping_rounds = early_stopping_rounds
        self.is_stratified = is_stratified
        self.is_shuffle = is_shuffle

    def update_model(self, **kwargs):
        """Set each keyword argument as an attribute on the wrapped model."""
        for name, value in kwargs.items():
            setattr(self.model, name, value)

    def evaluate_model(self):
        """Score the current model; subclasses override and return a value to minimize."""
        pass

    def optimize(self, param_space, max_evals=10, n_random_starts=2):
        """Run Bayesian optimization over param_space and update the model in place.

        Returns a dict mapping each dimension name to its optimal value.
        """
        t0 = time.time()

        # skopt calls the objective with a positional vector; the decorator
        # converts it back to named keyword arguments matching param_space.
        @use_named_args(param_space)
        def _minimize(**params):
            self.model.set_params(**params)
            return self.evaluate_model()

        result = gp_minimize(_minimize, param_space, n_calls=max_evals,
                             n_random_starts=n_random_starts, random_state=2405,
                             n_jobs=-1)
        # Pair each search dimension with the best value found for it.
        optimal_values = {dim.name: val for dim, val in zip(param_space, result.x)}
        self.best_score = result.fun
        self.opt = result
        print('optimal_parameters: {}\noptimal score: {}\noptimization time: {}'.format(
            optimal_values, result.fun, time.time() - t0))
        print('updating model with optimal values')
        self.update_model(**optimal_values)
        plot_convergence(result)
        return optimal_values
class CatboostOptimizer(ModelOptimizer):
    """ModelOptimizer specialization that scores a CatBoost model via cross-validation."""

    def evaluate_model(self):
        """Cross-validate the model and return 1 - best mean test AUC.

        gp_minimize minimizes its objective, so the AUC (higher is better)
        is converted into a loss (lower is better).

        Returns:
            float: 1 minus the maximum mean test AUC across boosting rounds.
        """
        # Bug fix: recent catboost versions removed the `metrics=` keyword
        # from cv(); passing it raised
        #   TypeError: cv() got an unexpected keyword argument 'metrics'.
        # The metric must instead be supplied inside the params dict as
        # `custom_metric`.
        params = self.model.get_params()
        params.setdefault('custom_metric', 'AUC')
        validation_scores = catboost.cv(
            catboost.Pool(self.X_train,
                          self.y_train,
                          cat_features=self.categorical_columns_indices),
            params,
            nfold=self.n_fold,
            stratified=self.is_stratified,
            seed=self.seed,
            early_stopping_rounds=self.early_stopping_rounds,
            shuffle=self.is_shuffle,
            plot=False)
        self.scores = validation_scores
        # Select the AUC test column by name rather than by positional index
        # (iloc[:, 2]), which is fragile across catboost versions; fall back
        # to the original positional lookup if the named column is absent.
        auc_columns = [c for c in validation_scores.columns
                       if c.startswith('test-AUC')]
        if auc_columns:
            test_scores = validation_scores[auc_columns[0]]
        else:
            test_scores = validation_scores.iloc[:, 2]
        best_metric = test_scores.max()
        return 1 - best_metric
# Build the base classifier whose learning rate will be tuned below.
cb = catboost.CatBoostClassifier(n_estimators=4000, # use large n_estimators deliberately to make use of the early stopping
                                 one_hot_max_size=2,
                                 loss_function='Logloss',
                                 eval_metric='AUC',
                                 boosting_type='Ordered', # use permutations
                                 random_seed=2405,
                                 use_best_model=True,
                                 silent=True)
# NOTE(review): X_train / y_train and Real (skopt.space) must be defined or
# imported elsewhere in this file — not visible in this chunk.
cb_optimizer = CatboostOptimizer(cb, X_train, y_train)
# Search space: a single continuous dimension for the learning rate.
params_space = [Real(0.01, 0.8, name='learning_rate'),]
# This is the line that raised the TypeError discussed above.
cb_optimal_values = cb_optimizer.optimize(params_space)
您使用的是哪个版本的 catboost?最近的版本在 `cv` 函数中没有 `metrics` 参数。您可以改为在传给 `cv` 函数的参数字典中,将 'AUC' 作为 `custom_metric` 传递。