"""
========================
Partial Dependence Plots
========================

Sigurd Carlsen Feb 2019
Holger Nahrstaedt 2020

.. currentmodule:: skopt

Plot objective now supports optional use of partial dependence as well as
different methods of defining parameter values for dependency plots.
"""
print(__doc__)
import sys
from skopt.plots import plot_objective
from skopt import forest_minimize
import numpy as np
np.random.seed(123)
import matplotlib.pyplot as plt


#############################################################################
# Objective function
# ==================
# Plot objective now supports optional use of partial dependence as well as
# different methods of defining parameter values for dependency plots.

# Here we define a function that we evaluate.
def funny_func(x):
    s = 0
    for i in range(len(x)):
        s += (x[i] * i) ** 2
    return s

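# The global minimum of this function is 0, reached whenever x[1] and x[2]
# are 0; the first coordinate has no influence because it is multiplied by
# i = 0. For example, funny_func([1., 1., 1.]) == 0 + 1 + 4 == 5.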

#############################################################################
# Optimisation using decision trees
# =================================
# We run forest_minimize on the function
bounds = [(-1, 1.), ] * 3
n_calls = 150

result = forest_minimize(funny_func, bounds, n_calls=n_calls,
                         base_estimator="ET",
                         random_state=4)

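#############################################################################
# As a quick check before plotting, the result object follows scipy's
# `OptimizeResult` convention, so the best observed parameters and objective
# value can be read off `result.x` and `result.fun`:

print("Best observed parameters:", result.x)
print("Best observed objective value:", result.fun)
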
#############################################################################
# Partial dependence plot
# =======================
# Here we see an example of using partial dependence. Even when setting
# n_points all the way down to 10 from the default of 40, this method is
# still very slow. This is because partial dependence calculates 250 extra
# predictions for each point on the plots.


_ = plot_objective(result, n_points=10)
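
#############################################################################
# If this is still too slow, the number of random samples used to average
# over the other dimensions can be lowered as well; this should be the
# `n_samples` argument of `plot_objective`, which defaults to the 250
# samples mentioned above, at the cost of a noisier partial dependence
# estimate:

_ = plot_objective(result, n_points=10, n_samples=60)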

#############################################################################
# It is possible to change the location of the red dot, which normally shows
# the position of the found minimum. We can set it to 'expected_minimum',
# which is the position of the minimum of the surrogate function, obtained
# by a minimum search method.

_ = plot_objective(result, n_points=10, minimum='expected_minimum')
#############################################################################
# Plot without partial dependence
# ===============================
# Here we plot without partial dependence. We see that it is a lot faster.
# Also, the values for the other parameters are set to the default "result",
# which is the parameter set of the best observed value so far. In the case
# of funny_func this is close to 0 for all parameters.

_ = plot_objective(result, sample_source='result', n_points=10)

#############################################################################
# Modify the shown minimum
# ========================
# Here we try setting the `minimum` parameter to something other than
# "result". First we try with "expected_minimum", which is the set of
# parameters that gives the minimum value of the surrogate function,
# using scipy's minimum search method.

_ = plot_objective(result, n_points=10, sample_source='expected_minimum',
                   minimum='expected_minimum')

#############################################################################
# "expected_minimum_random" is a naive way of finding the minimum of the
# surrogate by only using random sampling:

_ = plot_objective(result, n_points=10, sample_source='expected_minimum_random',
                   minimum='expected_minimum_random')

#############################################################################
# We can also specify how many initial samples are used for the two different
# "expected_minimum" methods. We set it to a low value in the next examples
# to showcase how it affects the minimum for the two methods.

_ = plot_objective(result, n_points=10, sample_source='expected_minimum_random',
                   minimum='expected_minimum_random',
                   n_minimum_search=10)

#############################################################################

_ = plot_objective(result, n_points=10, sample_source='expected_minimum',
                   minimum='expected_minimum', n_minimum_search=2)

#############################################################################
# Set a minimum location
# ======================
# Lastly we can also define these parameters ourselves by passing a list
# as the minimum argument:

_ = plot_objective(result, n_points=10, sample_source=[1, -0.5, 0.5],
                   minimum=[1, -0.5, 0.5])