commenting and hyperparameters, plus a training dataset option

finished commenting, and included some other hyperparameters that could be handy
Added the option to reuse past optimization data to skip the initial training
castaneda 2025-03-21 16:05:18 +01:00
parent a9fd8cebed
commit c50b21677b


@@ -40,6 +40,11 @@ if __name__ == '__main__':
# if no values are to be set from here, globalPar must be left as an empty dictionary
globalPar = {}
# $$$ TRAINING DATASET $$$
# if previous runs of the same optimization (i.e. with the same type of optimizer and the same type and number of input parameters) have been performed, their dataset can be fed in to skip the initial training
# indicate the path to the learner archive file, found in the /M-LOOP_archives/ folder
training_filename = None # set this to the archive path to reuse past data; leave None to train from scratch
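# a hypothetical example (the archive name below is illustrative, not an actual file in this repo):
# training_filename = './M-LOOP_archives/learner_archive_2025-03-21_16-05.txt'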
# $$$ INPUT PARAMETERS $$$
# indicate the variables to be optimized; in the following we will call them "input parameters"
@@ -85,17 +90,25 @@ if __name__ == '__main__':
num_params = num_params,
min_boundary = min_boundary, max_boundary = max_boundary,
first_params = inputPar,
param_names = inputPar_names,
#if retrieving dataset from previous runs
training_filename = training_filename,
#other settings
# fraction of allowed variation (from 0 to 1), relative to each parameter's range, around the current best parameters found; limits the exploration around the current global minimum of the cost function
trust_region = 1,
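# e.g. trust_region = 0.3 on a parameter with range [0, 10] would keep each new suggestion within 3 of that parameter's current best value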
#output parameters over which cost is computed are noisy quantities
cost_has_noise = True,
#if False, waits for the experiment to be performed every time so that every new optimization iteration trains on an enlarged training set
no_delay = False,
default_bad_cost = 0, # default cost assigned to a run flagged as bad
default_bad_uncertainty = 0, # default uncertainty assigned to a run flagged as bad
update_hyperparameters = True # whether hyperparameters should be tuned to avoid overfitting. Default False.
#for other possible settings for the optimizer see documentation https://m-loop.readthedocs.io/en/latest/tutorials.html
)
# To run M-LOOP and find the optimal parameters, just use the controller method optimize
controller.optimize()
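
For reference, a minimal end-to-end script using the same controller settings might look like the sketch below. It assumes the standard M-LOOP API (mloop.interfaces and mloop.controllers, as shown in the M-LOOP tutorials); the SimulatedExperiment class and its quadratic cost are hypothetical stand-ins for the real experiment.

import numpy as np
import mloop.interfaces as mli
import mloop.controllers as mlc

class SimulatedExperiment(mli.Interface):
    # toy interface: M-LOOP calls get_next_cost_dict once per optimization run
    def get_next_cost_dict(self, params_dict):
        params = params_dict['params']
        cost = float(np.sum(params**2)) # toy cost with its minimum at the origin
        # returning 'bad': True would make M-LOOP fall back to
        # default_bad_cost / default_bad_uncertainty for this run
        return {'cost': cost, 'uncer': 0.1, 'bad': False}

if __name__ == '__main__':
    interface = SimulatedExperiment()
    controller = mlc.create_controller(interface,
        controller_type = 'gaussian_process',
        max_num_runs = 50,
        num_params = 2,
        min_boundary = [-5.0, -5.0], max_boundary = [5.0, 5.0],
        trust_region = 1,
        cost_has_noise = True,
        no_delay = False,
        default_bad_cost = 0,
        default_bad_uncertainty = 0,
        update_hyperparameters = True)
    controller.optimize()
    # archives with the full run history are saved under ./M-LOOP_archives/ by default
    print('Best parameters found:', controller.best_params)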