 
 from collections import deque
 from typing import TYPE_CHECKING, Any
+from warnings import warn
 
+import numpy as np
 from sklearn.gaussian_process import GaussianProcessRegressor
 from sklearn.gaussian_process.kernels import Matern
 
@@ -22,7 +24,6 @@
 if TYPE_CHECKING:
     from collections.abc import Callable, Iterable, Mapping, Sequence
 
-    import numpy as np
     from numpy.random import RandomState
     from numpy.typing import NDArray
     from scipy.optimize import NonlinearConstraint
@@ -166,6 +167,7 @@ def __init__(
                 error_msg = "The transformer must be an instance of DomainTransformer"
                 raise TypeError(error_msg) from exc
 
+        self._sorting_warning_already_shown = False  # TODO: remove in future version
         super().__init__(events=DEFAULT_EVENTS)
 
     @property
@@ -220,6 +222,17 @@ def register(
         constraint_value: float or None
             Value of the constraint function at the observation, if any.
         """
+        # TODO: remove in future version
+        if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
+            msg = (
+                "You're attempting to register an np.ndarray. Currently, the optimizer internally sorts"
+                " parameters by key and expects any registered array to respect this order. In future"
+                " versions this behaviour will change and the order as given by the pbounds dictionary"
+                " will be used. If you wish to retain sorted parameters, please manually sort your pbounds"
+                " dictionary before constructing the optimizer."
+            )
+            warn(msg, stacklevel=1)
+            self._sorting_warning_already_shown = True
         self._space.register(params, target, constraint_value)
         self.dispatch(Events.OPTIMIZATION_STEP)
 
@@ -239,6 +252,18 @@ def probe(
             If True, the optimizer will evaluate the points when calling
             maximize(). Otherwise it will evaluate it at the moment.
         """
+        # TODO: remove in future version
+        if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
+            msg = (
+                "You're attempting to register an np.ndarray. Currently, the optimizer internally sorts"
+                " parameters by key and expects any registered array to respect this order. In future"
+                " versions this behaviour will change and the order as given by the pbounds dictionary"
+                " will be used. If you wish to retain sorted parameters, please manually sort your pbounds"
+                " dictionary before constructing the optimizer."
+            )
+            warn(msg, stacklevel=1)
+            self._sorting_warning_already_shown = True
+        params = self._space.array_to_params(params)
         if lazy:
             self._queue.append(params)
         else:
@@ -267,7 +292,8 @@ def _prime_queue(self, init_points: int) -> None:
             init_points = max(init_points, 1)
 
         for _ in range(init_points):
-            self._queue.append(self._space.random_sample())
+            sample = self._space.random_sample()
+            self._queue.append(self._space.array_to_params(sample))
 
     def _prime_subscriptions(self) -> None:
         if not any([len(subs) for subs in self._events.values()]):
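As a rough usage sketch of the behaviour introduced here (not part of the diff): assuming the public BayesianOptimization constructor accepts f=None and the register()/probe() signatures touched above, and with made-up pbounds, targets, and parameter values, the new warning would surface roughly like this:

import numpy as np
from bayes_opt import BayesianOptimization

# Illustrative bounds; the optimizer currently sorts keys internally ("x" before "y").
optimizer = BayesianOptimization(f=None, pbounds={"x": (-2.0, 2.0), "y": (0.0, 5.0)}, random_state=1)

# Registering a dict is unambiguous and does not trigger the new warning.
optimizer.register(params={"x": 0.5, "y": 1.0}, target=-1.25)

# Registering a flat array relies on the internal (sorted) key order, so the first
# such call now emits the warning added above; later calls stay silent because
# _sorting_warning_already_shown has been flipped to True.
optimizer.register(params=np.array([0.5, 1.0]), target=-1.25)

# probe() gains the same warning and additionally converts the array to a
# parameter dict via array_to_params() before queueing or evaluating it.
optimizer.probe(params=np.array([0.2, 3.0]), lazy=True)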