#NNDy/scripts/NNDy.py
import labscript
import numpy as np
import pandas as pd
#Imports for M-LOOP
import mloop.interfaces as mli
import mloop.controllers as mlc
import mloop.visualizations as mlv
#import the NNDy interface
import NNDy_Interface
if __name__ == '__main__':
    #indicate the name of the sequence to be optimized, as {routine_name}.py
    #no need to spell out the whole path: the sequence is looked up in the labscript 'shared_drive'
    routine_name = ''
    #indicate the complete path to the DA file that contains the cost function definition, e.g. ${THIS_FOLDER}$/{cost_model}.py
    cost_model = ''
    #see /DA/TestDA.py for instructions on how to write a DA file
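    #below, a minimal sketch of what such a DA file might contain; the function name,
    #signature and the 'atom_number' key are hypothetical - check /DA/TestDA.py for the
    #actual convention - while 'cost'/'uncer'/'bad' are the standard M-LOOP cost dictionary keys
    #
    #   import numpy as np
    #
    #   def cost(run_results):
    #       signal = np.mean(run_results['atom_number'])
    #       return {
    #           'cost': -signal, #M-LOOP minimizes, so negate a quantity to be maximized
    #           'uncer': np.std(run_results['atom_number']), #uncertainty on the cost
    #           'bad': signal <= 0, #flag runs where the measurement failed
    #       }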
    # $$$ HALTING CONDITIONS $$$
    #these three halting conditions can be specified in any combination, as long as at least one is set
    #mute the ones you don't use, both here and where they are passed to the controller below
    #indicate the maximum number of runs
    max_num_runs = 100 #example value, adjust to your experiment
    #indicate the maximum number of runs allowed without finding better parameters
    max_num_runs_without_better_params = 50 #example value
    #indicate the target cost: the optimization halts once the cost falls below it
    target_cost = -1.0 #example value, depends on your cost model
    # $$$ FIXED GLOBAL VARIABLES $$$
    #from here you can set the values of some or all of the global variables (already defined in runmanager - see the runmanager documentation for more details) that will NOT be optimized
    #globalPar must be a dictionary
    #values set here override those set in runmanager (remember that runmanager, blacs and lyse must be open and correctly set up before you can run this script;
    #it's good practice to execute the sequence from runmanager and check the returns of the DA file before running NNDy)
    #if no value is to be set from here, globalPar must be an empty dictionary
    globalPar = {}
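    #example with hypothetical variable names - use the globals of your own runmanager setup:
    #globalPar = {'MOT_load_time': 0.5, 'repump_freq': 6.834e9}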
    # $$$ TRAINING DATASET $$$
    #if previous runs of the same optimization - same type of optimizer, same type and number of input parameters - have been performed, their dataset can be fed in to skip the initial training
    #indicate the path of the learner archive file, found in the /M-LOOP_archives/ folder
    training_filename = None #leave None if there is no previous dataset to load
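    #example with a hypothetical path - M-LOOP names learner archives with a timestamp:
    #training_filename = 'M-LOOP_archives/learner_archive_2025-03-28_11-24.txt'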
    # $$$ INPUT PARAMETERS $$$
    #indicate the variables to be optimized; in the following we call them "input parameters"
    #input parameters are to be chosen among the global variables of the sequence
    #list their names here
    inputPar_names = []
    num_params = len(inputPar_names)
    #in another list, in the same order, indicate the initial values of the parameters
    #check these values beforehand, so that they don't fall in a sparse region of the parameter space, i.e. they should already produce a good signal
    #even better if they're "human optimized" values
    inputPar = []
    #indicate the range of the input parameters as two lists of length num_params
    #it's recommended to set the hardware limit of each parameter here and, if needed, to impose a trust region - see below - to restrict the optimizer's search
    min_boundary = []
    max_boundary = []
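    #example with hypothetical parameter names, initial values and hardware limits:
    #inputPar_names = ['coil_current', 'cooling_detuning']
    #inputPar = [2.5, -1.8]
    #min_boundary = [0.0, -10.0]
    #max_boundary = [5.0, 0.0]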
    hyperpar = {
        'globalPar': globalPar,
        'inputPar_names': inputPar_names
    }
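    #optional sanity check (plain Python, not an M-LOOP feature): every input parameter
    #needs an initial value and a boundary pair
    assert len(inputPar) == len(min_boundary) == len(max_boundary) == num_params, \
        'inputPar, min_boundary and max_boundary must each have one entry per input parameter'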
    #your job here is almost done...
    #the interface is created from the class defined in the NNDy_Interface file
    interface = NNDy_Interface.NNDy_Interface(routine_name, cost_model, hyperpar)
    #then the controller is fed with the settings defined above
    controller = mlc.create_controller(interface,
                controller_type = 'neural_net',
                #HALTING CONDITIONS, select as needed by commenting
                max_num_runs = max_num_runs,
                max_num_runs_without_better_params = max_num_runs_without_better_params,
                target_cost = target_cost,
                #INPUT PARAMETERS
                num_params = num_params,
                min_boundary = min_boundary, max_boundary = max_boundary,
                first_params = inputPar,
                param_names = inputPar_names,
                #if retrieving a dataset from previous runs
                training_filename = training_filename,
                #other settings
                #allowed variation around the current best parameters, as a fraction (strictly between 0 and 1) of each parameter range;
                #it limits the exploration around the current global minimum of the cost function
                #trust_region = 0.5, #example value; 1 is not a valid value - if no trust region is needed, leave this whole line commented out
                #the output parameters over which the cost is computed are noisy quantities
                cost_has_noise = True,
                #if False, every new optimization iteration waits for retraining, so the optimizer always learns from the enlarged training set
                no_delay = False,
                default_bad_cost = 0, #default cost assigned to a bad run
                default_bad_uncertainty = 0, #default uncertainty assigned to a bad run
                update_hyperparameters = True #whether hyperparameters should be tuned to avoid overfitting; default False
                #for other possible settings of the optimizer see the documentation: https://m-loop.readthedocs.io/en/latest/tutorials.html
                )
    #To run M-LOOP and find the optimal parameters, just use the controller method optimize
    controller.optimize()
    #The results of the optimization are saved to files and can also be accessed as attributes of the controller
    #print('Best parameters found:')
    #print(controller.best_params)
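    #best_cost is another standard attribute of M-LOOP controllers, shown here as a further example
    #print('Best cost found:')
    #print(controller.best_cost)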
    #You can also run the default set of visualizations for the controller with one command
    mlv.show_all_default_visualizations(controller)