import labscript
import numpy as np
import pandas as pd

# Imports for M-LOOP
import mloop.interfaces as mli
import mloop.controllers as mlc
import mloop.visualizations as mlv

# Import the NNDy interface
import NNDy_Interface

if __name__ == '__main__':
    # Indicate the name of the sequence to be optimized, to be found in {routine_name}.py
    routine_name = 'TestSetup'
    # Indicate the name of the DA (data analysis) file to be run by lyse, {cost_model}.py
    cost_model = 'TestDA'
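
    # A minimal sketch of what {cost_model}.py could contain, assuming lyse's
    # single-shot Run API and that the analysis saves a scalar result which the
    # NNDy interface reads back as the cost (group/result names are illustrative):
    #
    #   import lyse
    #   run = lyse.Run(lyse.path)
    #   signal = run.get_result('my_analysis', 'atom_number')  # hypothetical names
    #   run.save_result('cost', -signal)  # M-LOOP minimizes the cost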

    # HALTING CONDITIONS
    # Indicate the maximum number of runs
    #max_num_runs = 10
    # or one can also indicate max_num_runs_without_better_params
    max_num_runs_without_better_params = 50
    # Indicate the target cost
    #target_cost = 0

    # FIXED GLOBAL VARIABLES
    # Indicate the values of the global variables that won't be optimized
    globalPar = {'T_wlm': 60,
                 'buffer_time': 10}
    # Not necessary if they're already set in runmanager

    # INPUT PARAMETERS
    # Indicate the initial values of the global variables to be optimized,
    # in the following called input parameters, as a list plus a list of their names
    inputPar = [1e6, 2.5, 2]
    num_params = len(inputPar)
    inputPar_names = ['delta_freq', 'carrier_amp', 'wait_AWG']

    # Indicate the range of the input parameters as two lists of length num_params
    min_boundary = [-20e6, 0.01, 0]
    max_boundary = [20e6, 4.5, 10]
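
    # Optional sanity check: the boundary lists must have one entry per input parameter
    assert len(min_boundary) == len(max_boundary) == num_params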

    hyperpar = {
        'globalPar': globalPar,
        'inputPar_names': inputPar_names
    }

    interface = NNDy_Interface.NNDy_Interface(routine_name, cost_model, hyperpar)
    controller = mlc.create_controller(interface,
                                       controller_type = 'neural_net',
                                       # HALTING CONDITIONS
                                       #max_num_runs = max_num_runs,
                                       max_num_runs_without_better_params = max_num_runs_without_better_params,
                                       #target_cost = target_cost,
                                       # INPUT PARAMETERS
                                       num_params = num_params,
                                       # M-LOOP handles the variables as arrays rather than dictionaries,
                                       # so the parameters are passed as lists together with a list of names
                                       min_boundary = min_boundary, max_boundary = max_boundary,
                                       first_params = inputPar,
                                       param_names = inputPar_names,
                                       # OTHER SETTINGS
                                       # Fraction of allowed variation from the current best parameters found
                                       trust_region = 0.5,
                                       # The output parameters over which the cost is computed are noisy quantities
                                       cost_has_noise = True,
                                       # If False, M-LOOP waits for each experiment to be performed, so that every
                                       # new optimization iteration trains on an enlarged training set
                                       no_delay = False)
    # For other possible settings of the optimizer, see the M-LOOP documentation:
    # https://m-loop.readthedocs.io/en/latest/tutorials.html
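    # Other controller_type options provided by M-LOOP include 'gaussian_process',
    # 'differential_evolution' and 'nelder_mead'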

    # To run M-LOOP and find the optimal parameters, just use the controller method optimize
    controller.optimize()

    # The results of the optimization will be saved to files and can also be
    # accessed as attributes of the controller
    #print('Best parameters found:')
    #print(controller.best_params)
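    # Further summary attributes such as best_cost and best_uncer should also be
    # available on the controller (attribute names as in the M-LOOP tutorial):
    #print('Best cost found:', controller.best_cost, '+/-', controller.best_uncer)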

    # You can also run the default set of visualizations for the controller with one command
    mlv.show_all_default_visualizations(controller)