@@ -599,8 +599,8 @@ def get_next_trial(
         # TODO[T79183560]: Ensure correct handling of generator run when using
         # foreign keys.
         self._update_generation_strategy_in_db_if_possible(
-            generation_strategy=self.generation_strategy,
-            new_generator_runs=[self.generation_strategy._generator_runs[-1]],
+            generation_strategy=self.standard_generation_strategy,
+            new_generator_runs=[self.standard_generation_strategy._generator_runs[-1]],
         )
         return none_throws(trial.arm).parameters, trial.index
@@ -625,7 +625,7 @@ def get_current_trial_generation_limit(self) -> tuple[int, bool]:
         if self.generation_strategy._experiment is None:
             self.generation_strategy.experiment = self.experiment
 
-        return self.generation_strategy.current_generator_run_limit()
+        return self.standard_generation_strategy.current_generator_run_limit()
 
     def get_next_trials(
         self,
@@ -950,7 +950,7 @@ def get_max_parallelism(self) -> list[tuple[int, int]]:
         Mapping of form {num_trials -> max_parallelism_setting}.
         """
         parallelism_settings = []
-        for step in self.generation_strategy._steps:
+        for step in self.standard_generation_strategy._steps:
             parallelism_settings.append(
                 (step.num_trials, step.max_parallelism or step.num_trials)
             )
@@ -1071,15 +1071,15 @@ def get_contour_plot(
             raise ValueError(
                 f'Metric "{metric_name}" is not associated with this optimization.'
             )
-        if self.generation_strategy.model is not None:
+        if self.standard_generation_strategy.model is not None:
             try:
                 logger.info(
                     f"Retrieving contour plot with parameter '{param_x}' on X-axis "
                     f"and '{param_y}' on Y-axis, for metric '{metric_name}'. "
                     "Remaining parameters are affixed to the middle of their range."
                 )
                 return plot_contour(
-                    model=none_throws(self.generation_strategy.model),
+                    model=none_throws(self.standard_generation_strategy.model),
                     param_x=param_x,
                     param_y=param_y,
                     metric_name=metric_name,
@@ -1089,8 +1089,8 @@ def get_contour_plot(
                 # Some models don't implement '_predict', which is needed
                 # for the contour plots.
                 logger.info(
-                    f"Model {self.generation_strategy.model} does not implement "
-                    "`predict`, so it cannot be used to generate a response "
+                    f"Model {self.standard_generation_strategy.model} does not "
+                    "implement `predict`, so it cannot be used to generate a response "
                     "surface plot."
                 )
                 raise UnsupportedPlotError(
@@ -1112,14 +1112,14 @@ def get_feature_importances(self, relative: bool = True) -> AxPlotConfig:
         """
         if not self.experiment.trials:
             raise ValueError("Cannot generate plot as there are no trials.")
-        cur_model = self.generation_strategy.model
+        cur_model = self.standard_generation_strategy.model
         if cur_model is not None:
             try:
                 return plot_feature_importance_by_feature(cur_model, relative=relative)
             except NotImplementedError:
                 logger.info(
-                    f"Model {self.generation_strategy.model} does not implement "
-                    "`feature_importances`, so it cannot be used to generate "
+                    f"Model {self.standard_generation_strategy.model} does not "
+                    "implement `feature_importances`, so it cannot be used to generate "
                     "this plot. Only certain models, implement feature importances."
                 )
@@ -1247,7 +1247,8 @@ def get_model_predictions(
             else set(none_throws(self.experiment.metrics).keys())
         )
         model = none_throws(
-            self.generation_strategy.model, "No model has been instantiated yet."
+            self.standard_generation_strategy.model,
+            "No model has been instantiated yet.",
         )
 
         # Construct a dictionary that maps from a label to an
@@ -1306,8 +1307,8 @@ def fit_model(self) -> None:
             "At least one trial must be completed with data to fit a model."
         )
         # Check if we should transition before generating the next candidate.
-        self.generation_strategy._maybe_transition_to_next_node()
-        self.generation_strategy._fit_current_model(data=None)
+        self.standard_generation_strategy._maybe_transition_to_next_node()
+        self.standard_generation_strategy._fit_current_model(data=None)
 
     def verify_trial_parameterization(
         self, trial_index: int, parameterization: TParameterization
@@ -1496,29 +1497,10 @@ def from_json_snapshot(
 
     # ---------------------- Private helper methods. ---------------------
 
-    @property
-    def experiment(self) -> Experiment:
-        """Returns the experiment set on this Ax client."""
-        return none_throws(
-            self._experiment,
-            (
-                "Experiment not set on Ax client. Must first "
-                "call load_experiment or create_experiment to use handler functions."
-            ),
-        )
-
     def get_trial(self, trial_index: int) -> Trial:
         """Return a trial on experiment cast as Trial"""
         return checked_cast(Trial, self.experiment.trials[trial_index])
 
-    @property
-    def generation_strategy(self) -> GenerationStrategy:
-        """Returns the generation strategy, set on this experiment."""
-        return none_throws(
-            self._generation_strategy,
-            "No generation strategy has been set on this optimization yet.",
-        )
-
     @property
     def objective(self) -> Objective:
         return none_throws(self.experiment.optimization_config).objective
@@ -1586,7 +1568,7 @@ def get_best_trial(
     ) -> tuple[int, TParameterization, TModelPredictArm | None] | None:
         return self._get_best_trial(
             experiment=self.experiment,
-            generation_strategy=self.generation_strategy,
+            generation_strategy=self.standard_generation_strategy,
             trial_indices=trial_indices,
             use_model_predictions=use_model_predictions,
         )
@@ -1600,7 +1582,7 @@ def get_pareto_optimal_parameters(
    ) -> dict[int, tuple[TParameterization, TModelPredictArm]]:
        return self._get_pareto_optimal_parameters(
            experiment=self.experiment,
-            generation_strategy=self.generation_strategy,
+            generation_strategy=self.standard_generation_strategy,
            trial_indices=trial_indices,
            use_model_predictions=use_model_predictions,
        )
@@ -1614,7 +1596,7 @@ def get_hypervolume(
     ) -> float:
         return BestPointMixin._get_hypervolume(
             experiment=self.experiment,
-            generation_strategy=self.generation_strategy,
+            generation_strategy=self.standard_generation_strategy,
             optimization_config=optimization_config,
             trial_indices=trial_indices,
             use_model_predictions=use_model_predictions,
@@ -1817,7 +1799,7 @@ def _gen_new_generator_run(
             else None
         )
         with with_rng_seed(seed=self._random_seed):
-            return none_throws(self.generation_strategy).gen(
+            return none_throws(self.standard_generation_strategy).gen(
                 experiment=self.experiment,
                 n=n,
                 pending_observations=self._get_pending_observation_features(
0 commit comments