Commit e5cd9af7 authored by Leodegario Lorenzo II's avatar Leodegario Lorenzo II
Browse files

Add scorer and scaling in tune model function

parent 524e141f
......@@ -1149,7 +1149,7 @@ def plot_hyperparameter_accuracies(all_scores, all_std, params):
plt.show()
def tune_model(X, label, task, model, params, n_trials, tree_rs=1337,
write_to_file=None, to_plot=False):
write_to_file=None, to_plot=False, scorer=None, scaling=None):
"""
Return tuned model and params for a given model, dataset, and parameters
......@@ -1183,6 +1183,10 @@ def tune_model(X, label, task, model, params, n_trials, tree_rs=1337,
to_plot : bool
Set to True if score vs parameter plots is desired. Works only when
the number of hyper parameters is at most 2.
scorer : str, default=None
Name of the scorer to use when evaluating the model on the test set.
If None, the estimator's default `score` method is used.
scaling : str, default=None
Name of the scaling method to apply to the features before fitting.
If None, no scaling is performed.
Returns
-------
......@@ -1227,6 +1231,30 @@ def tune_model(X, label, task, model, params, n_trials, tree_rs=1337,
X_train, X_test, y_train, y_test = train_test_split(
X, label, random_state=i)
# Perform scaling
if scaling is not None:
# Check if power scaling
if scaling == 'power':
# Check if strictly positive features
if (X_train <= 0).sum() == 0:
# Set scaler
scaler = MLModels.scalers()['power']
# Set method to box cox
scaler.set_params(method='box-cox')
else:
scaler = MLModels.scalers()[scaling]
else:
# Set scaler based on user option
scaler = MLModels.scalers()[scaling]
# Fit to train dataset
scaler.fit(X_train)
# Transform train and test dataset
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
# Initialize model
cur_clf = clf(**cur_param)
......@@ -1234,7 +1262,11 @@ def tune_model(X, label, task, model, params, n_trials, tree_rs=1337,
cur_clf.fit(X_train, y_train.ravel())
# Get score
scores.append(cur_clf.score(X_test, y_test))
if scorer is not None:
scores.append(MLModels.scorers()[scorer](
y_test, cur_clf.predict(X_test)))
else:
scores.append(cur_clf.score(X_test, y_test))
# Update progress bar
pb.update(1)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment