Update scripts/NNDy.py

First extended commenting pass.
This commit is contained in:
castaneda 2025-03-21 14:43:18 +01:00
parent c9bba34ba6
commit 48d73f6811

View File

@ -1,89 +1,108 @@
import labscript
import numpy as np
import pandas as pd
# Imports for M-LOOP
import mloop.interfaces as mli
import mloop.controllers as mlc
import mloop.visualizations as mlv
# import interface
import NNDy_Interface

if __name__ == '__main__':
    # Optimization driver: wires a labscript sequence and a lyse data-analysis
    # (DA) cost file into an M-LOOP neural-network controller, then runs the
    # optimization loop.

    # indicate name of the sequence to be optimized, to be found in {routine_name}.py
    routine_name = 'TestSetup'
    # indicate name of the DA file to be run by lyse, {cost_model}.py
    cost_model = 'TestDA'

    # HALTING CONDITIONS
    # indicate maximum number of runs
    # max_num_runs = 10
    # or one can also indicate max_num_runs_without_better_params
    max_num_runs_without_better_params = 50
    # indicate target cost
    # target_cost = 0

    # FIXED GLOBAL VARIABLES
    # values of the global variables that won't be optimized
    # (not necessary if they're already set on runmanager)
    globalPar = {'T_wlm': 60,
                 'buffer_time': 10}

    # INPUT PARAMETERS
    # initial values of the global variables to be optimized ("input
    # parameters"), given as a list; the matching names are listed separately
    # in the same order (M-LOOP handles parameters as arrays, not dictionaries)
    inputPar = [1e6, 2.5, 2]
    num_params = len(inputPar)
    inputPar_names = ['delta_freq', 'carrier_amp', 'wait_AWG']
    # range of input parameters as two lists of length num_params
    min_boundary = [-20e6, 0.01, 0]
    max_boundary = [20e6, 4.5, 10]

    # settings forwarded to the interface alongside the sequence/DA names
    hyperpar = {
        'globalPar': globalPar,
        'inputPar_names': inputPar_names
    }

    interface = NNDy_Interface.NNDy_Interface(routine_name, cost_model, hyperpar)

    controller = mlc.create_controller(interface,
                                       controller_type='neural_net',
                                       # HALTING CONDITIONS
                                       # max_num_runs = max_num_runs,
                                       max_num_runs_without_better_params=max_num_runs_without_better_params,
                                       # target_cost = target_cost,
                                       # INPUT PARAMETERS
                                       num_params=num_params,
                                       # mloop handles the variables as python arrays, not dictionaries,
                                       # so the parameters are passed as named lists
                                       min_boundary=min_boundary, max_boundary=max_boundary,
                                       first_params=inputPar,
                                       param_names=inputPar_names,
                                       # other settings
                                       # fraction of allowed variation from the current best parameters found
                                       trust_region=0.5,
                                       # output parameters over which cost is computed are noisy quantities
                                       cost_has_noise=True,
                                       # if False, waits for the experiment every time, so each new
                                       # optimization iteration trains on an enlarged training set
                                       no_delay=False)
    # for other possible optimizer settings see the documentation:
    # https://m-loop.readthedocs.io/en/latest/tutorials.html

    # To run M-LOOP and find the optimal parameters just use the controller
    # method optimize
    controller.optimize()

    # The results of the optimization will be saved to files and can also be
    # accessed as attributes of the controller.
    # print('Best parameters found:')
    # print(controller.best_params)
    # You can also run the default sets of visualizations for the controller
    # with one command
import labscript
import numpy as np
import pandas as pd
# Imports for M-LOOP
import mloop.interfaces as mli
import mloop.controllers as mlc
import mloop.visualizations as mlv
# import interface
import NNDy_Interface

if __name__ == '__main__':
    # Template driver: fill in the settings below, then run to optimize a
    # labscript sequence with M-LOOP's neural-network controller.
    # NOTE(review): the committed template left several assignments blank
    # (`max_num_runs =`, `target_cost =`, `trust_region = ,`), which is a
    # syntax error; explicit placeholder values are supplied here, marked TODO.

    # indicate name of the sequence to be optimized as {routine_name}.py
    # no need to explicit the whole path since the sequence will be looked for
    # in the 'shared_drive' of labscript
    routine_name = ''  # TODO: set sequence name
    # indicate complete path to the DA file that contains the cost function
    # definition, e.g. ${THIS_FOLDER}$/{cost_model}.py
    # see /DA/TestDA.py for instructions on how to write DA file
    cost_model = ''  # TODO: set DA file path

    # $$$ HALTING CONDITIONS $$$
    # these 3 halting conditions can be specified in any combination or subset
    # of minimum one of them; mute the ones that you don't use both here and
    # in the passing to the controller below
    # indicate maximum number of runs
    max_num_runs = 100  # TODO: adjust or mute
    # indicate max_num_runs_without_better_params
    max_num_runs_without_better_params = 50  # TODO: adjust or mute
    # indicate target cost
    target_cost = 0  # TODO: adjust or mute

    # $$$ FIXED GLOBAL VARIABLES $$$
    # it's possible to set from here the values of some or all global
    # variables (already defined in runmanager - see runmanager documentation
    # for more details) that will NOT be optimized
    # globalPar must be a dictionary
    # setting them here will override the values set in runmanager (remember
    # that runmanager, blacs and lyse must be open and correctly setup before
    # you can run this script - it's good practice to execute the sequence
    # from runmanager and check the returns of the DA file before running NNDy)
    # if no value is to be set from here, globalPar must be an empty dictionary
    globalPar = {}

    # $$$ INPUT PARAMETERS $$$
    # indicate variables to be optimized, in the following we will call them
    # "input parameters"; they are to be chosen among the global variables of
    # the sequence
    # indicate in the following the names, in a list
    inputPar_names = []  # TODO: fill in
    num_params = len(inputPar_names)
    # also listed and in the same order, indicate the initial values of the
    # parameters; such values should be checked before, so that they don't map
    # into a sparse region of the parameter space, i.e. they should produce a
    # good signal already - even better if they're "human optimized" values
    inputPar = []  # TODO: fill in
    # indicate range of input parameters as two lists of length num_params
    # it's recommended to set here the hardware limit for each parameter,
    # eventually imposing a trust region - see below - to limit the search
    min_boundary = []  # TODO: fill in
    max_boundary = []  # TODO: fill in

    hyperpar = {
        'globalPar': globalPar,
        'inputPar_names': inputPar_names
    }

    # your job here is almost done...
    # the interface is created recalling the object defined in the
    # NNDy_Interface file
    interface = NNDy_Interface.NNDy_Interface(routine_name, cost_model, hyperpar)

    # then the controller is fed with the settings defined above
    controller = mlc.create_controller(interface,
                                       controller_type='neural_net',
                                       # HALTING CONDITIONS, select as needed by commenting
                                       max_num_runs=max_num_runs,
                                       max_num_runs_without_better_params=max_num_runs_without_better_params,
                                       target_cost=target_cost,
                                       # INPUT PARAMETERS
                                       num_params=num_params,
                                       min_boundary=min_boundary, max_boundary=max_boundary,
                                       first_params=inputPar,
                                       param_names=inputPar_names,
                                       # other settings
                                       # fraction of allowed variation (from 0 to 1) - wrt each
                                       # parameter range - from current best parameters found; limits
                                       # the exploration around the current global minimum of the cost
                                       trust_region=0.5,  # TODO: tune
                                       # output parameters over which cost is computed are noisy quantities
                                       cost_has_noise=True,
                                       # if False, waits for the experiment every time, so each new
                                       # optimization iteration trains on an enlarged training set
                                       no_delay=False)
    # for other possible optimizer settings see the documentation:
    # https://m-loop.readthedocs.io/en/latest/tutorials.html

    # To run M-LOOP and find the optimal parameters just use the controller
    # method optimize
    controller.optimize()

    # The results of the optimization will be saved to files and can also be
    # accessed as attributes of the controller.
    # print('Best parameters found:')
    # print(controller.best_params)
    # You can also run the default sets of visualizations for the controller
    # with one command
    mlv.show_all_default_visualizations(controller)