Add unit tests for constrained multi-objective GPSampler (#6235)
nabenabe0928 merged 2 commits into optuna:master
Conversation
import optuna
import optunahub

# Load the HPOlib tabular benchmark from OptunaHub and instantiate a
# bi-objective problem: minimize validation loss and model size.
# NOTE(review): dataset_id=0 presumably selects the first HPOlib dataset —
# confirm against the optunahub benchmarks/hpolib documentation.
hpolib = optunahub.load_module("benchmarks/hpolib").Problem(
    dataset_id=0, metric_names=["val_loss", "model_size"]
)
def objective(trial: optuna.Trial) -> tuple[float, float]:
    """Evaluate the benchmark and record the constraint value on the trial.

    The constraint ``model_size - 1400 <= 0`` is stored under the
    ``"constraints"`` user attribute so ``constraints_func`` can read it.
    """
    loss, size = hpolib(trial)
    # A single-element tuple: the trial is feasible when size <= 1400.
    trial.set_user_attr("constraints", (size - 1400, ))
    return loss, size
def constraints(trial: optuna.trial.FrozenTrial) -> tuple[float]:
    """Return the constraint values the objective stored on this trial."""
    stored = trial.user_attrs["constraints"]
    return stored
# GPSampler with a fixed seed for reproducibility; passing constraints_func
# enables constrained optimization (feasibility read from each trial).
sampler = optuna.samplers.GPSampler(seed=0, constraints_func=constraints)
# Multi-objective study: optimization directions come from the benchmark.
study = optuna.create_study(sampler=sampler, directions=hpolib.directions)
study.optimize(objective, n_trials=20)
print(study.best_trials)

I confirmed that the example above works.
kAIto47802
left a comment
There was a problem hiding this comment.
Thank you for the PR! I left some minor comments. PTAL!
|
|
||
| import optuna | ||
| import optuna._gp.acqf as acqf_module | ||
| import optuna._gp.gp as optuna_gp |
There was a problem hiding this comment.
| import optuna._gp.gp as optuna_gp | |
| import optuna._gp.gp as gp |
How about naming like this, following the import in sampler.py.
optuna/optuna/samplers/_gp/sampler.py
Line 29 in 255fc3d
There was a problem hiding this comment.
Let me keep the module name as is to avoid confusion 🙇
| {"a": optuna.distributions.FloatDistribution(0.0, 1.0)} | ||
| ) | ||
| gpr = optuna._gp.gp.fit_kernel_params( | ||
| gpr = optuna_gp.fit_kernel_params( |
There was a problem hiding this comment.
| gpr = optuna_gp.fit_kernel_params( | |
| gpr = gp.fit_kernel_params( |
According to the suggestion above, this part needs to be updated.
optuna/optuna/samplers/_gp/sampler.py
Line 259 in 255fc3d
| ) -> tuple[float] | tuple[float, float]: | ||
| x = trial.suggest_float("x", 0, 1) | ||
| if n_objectives == 1: | ||
| return (x,) |
There was a problem hiding this comment.
Returning a float instead of tuple[float] is sufficient for the single-objective optimization.
| ) -> tuple[float] | tuple[float, float]: | |
| x = trial.suggest_float("x", 0, 1) | |
| if n_objectives == 1: | |
| return (x,) | |
| ) -> float | tuple[float, float]: | |
| x = trial.suggest_float("x", 0, 1) | |
| if n_objectives == 1: | |
| return x |
There was a problem hiding this comment.
Due to the change here, your change will cause an error.
kAIto47802
left a comment
There was a problem hiding this comment.
Thank you for the comment. LGTM!
Motivation
This PR adds unit tests for constrained multi-objective
GPSampler.