%% Direct solution of the unconstrained optimization problem with the Conjugate Gradient technique
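%
% Outline of the method implemented below:
%   1. Compute the gradient J at the current point (numerically, by central differences).
%   2. Find a step size lambda along the search direction S with a backtracking (Armijo) line search.
%   3. Update the point, recompute the gradient, and update S with a conjugate-gradient beta formula.
%   4. Repeat until the norm of the search direction drops below the tolerance Epsilon.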

clc;
clear;
format long;

% Define the function (now accepts a 3D array)
f = @(X) sum(X(:).^4) - 4*sum(X(:).^3) + 6*sum(X(:).^2) + 5;
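
% Note: the objective is separable; each element contributes x^4 - 4*x^3 + 6*x^2,
% whose only real critical point is x = 0 (its derivative factors as 4*x*(x^2 - 3*x + 3)),
% so the global minimum is f = 5 at X = 0. Useful as a check on the computed result.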

% Initial Guess: Now we use a 3D array
x_o = rand(3, 3, 3); % Random 3D array as the initial guess
fprintf('Initial Objective Function Value: %.6f\n\n', f(x_o));

% Convergence Criteria:
Epsilon = 1E-5;

% Gradient Calculation (numerical)
J = compute_gradient(f, x_o); % Gradient at the initial point
S = -J;                       % Initial search direction

% Iteration Counter:
i = 0;

% Minimization
while norm(S(:)) > Epsilon % Halting Criterion
    % Step Size Optimization (Line Search)
    lambda = optimize_step_size(f, x_o, S);

    % Update the solution
    x_o_new = x_o + lambda * S;

    % Update the gradient and search direction
    J_old = J;
    J = compute_gradient(f, x_o_new);
    S = update_search_direction(S, J, J_old);

    % Update for the next iteration
    x_o = x_o_new;
    i = i + 1;
end

% Output
fprintf('Number of Iterations for Convergence: %d\n\n', i);
fprintf('Objective Function Minimum Value: %.6f\n\n', f(x_o));

%% Visualize the optimization

clc;
clear;
format long;

% Define the function (now accepts a 3D array)
f = @(X) sum(X(:).^4) - 4*sum(X(:).^3) + 6*sum(X(:).^2) + 5;

% Initial Guess: Now we use a 3D array
x_o = rand(3, 3, 3); % Random 3D array as the initial guess
fprintf('Initial Objective Function Value: %.6f\n\n', f(x_o));

% Convergence Criteria:
Epsilon = 1E-5;

% Gradient Calculation (numerical)
J = compute_gradient(f, x_o); % Gradient at the initial point
S = -J;                       % Initial search direction

% Iteration Counter:
i = 0;

% Prepare for visualization
fig = figure;
clf
set(gcf, 'Position', [50 50 950 750])
set(gca, 'FontSize', 16, 'Box', 'On', 'Linewidth', 2);
hold on;

% Create a mesh grid for visualization
[X1, X2] = meshgrid(-2:0.1:2, -2:0.1:2); % Adjust the domain based on your function
Z = arrayfun(@(x1, x2) f([x1, x2, 0]), X1, X2); % Example 2D slice: f([X1, X2, 0])

% Plot the surface
surf(X1, X2, Z);
xlabel('X1');
ylabel('X2');
zlabel('f(X)');
title('Conjugate Gradient Optimization Process');
colorbar;
scatter3(x_o(1), x_o(2), f(x_o), 'r', 'filled'); % Initial point (first two elements of x_o)

% Set the 3D view
view(75, 30); % Example 3D view: azimuth 75 deg, elevation 30 deg
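
% Note: the optimization variable is a 3x3x3 array (27 unknowns), while the surface
% above is only a two-variable slice of the objective. The plotted markers show just
% the first two elements of x_o, so they are a projection of the iterates and need
% not lie exactly on the surface.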

% Minimization with visualization
while norm(S(:)) > Epsilon % Halting Criterion
    % Step Size Optimization (Line Search)
    lambda = optimize_step_size(f, x_o, S);

    % Update the solution
    x_o_new = x_o + lambda * S;

    % Update the gradient and search direction
    J_old = J;
    J = compute_gradient(f, x_o_new);
    S = update_search_direction(S, J, J_old);

    % Update for the next iteration
    x_o = x_o_new;
    i = i + 1;

    % Visualization Update
    scatter3(x_o(1), x_o(2), f(x_o), 'g', 'filled'); % Current point
    drawnow; % Update the plot
end

% Output
fprintf('Number of Iterations for Convergence: %d\n\n', i);
fprintf('Objective Function Minimum Value: %.6f\n\n', f(x_o));

%%
% Numerical Gradient Calculation using the finite-differences method for 3D arrays
function J = compute_gradient(f, X)
    epsilon = 1E-5;     % Step size for numerical differentiation
    J = zeros(size(X)); % Gradient with the same size as the input

    % Loop over all elements of the array
    for i = 1:numel(X)
        X1 = X;
        X2 = X;
        % Perturb element X(i) in both directions
        X1(i) = X1(i) + epsilon;
        X2(i) = X2(i) - epsilon;
        % Central-difference formula for the gradient component
        J(i) = (f(X1) - f(X2)) / (2 * epsilon);
    end
end
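
% Sanity check (optional sketch): for the test objective defined above, the analytic
% gradient is 4*X.^3 - 12*X.^2 + 12*X elementwise, so the numerical gradient can be
% verified against it. A snippet like the one below, placed in the script body
% (above the local functions), should agree to roughly 1e-6 or better; Xt, Ja and Jn
% are illustrative variable names, not part of the original script.
%
%   Xt = rand(3, 3, 3);
%   Ja = 4*Xt.^3 - 12*Xt.^2 + 12*Xt;   % analytic gradient of f at Xt
%   Jn = compute_gradient(f, Xt);      % numerical central-difference gradient
%   fprintf('Max gradient error: %.3e\n', max(abs(Jn(:) - Ja(:))));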

% Line Search (Step Size Optimization) via Armijo backtracking
function lambda = optimize_step_size(f, X, S)
    alpha = 1;      % Initial step size
    rho = 0.5;      % Step-size reduction factor
    c = 1E-4;       % Armijo condition constant
    max_iter = 100; % Max iterations for backtracking

    % The gradient and objective value at X do not change during backtracking,
    % so compute them once before the loop.
    grad = compute_gradient(f, X);
    f_X = f(X);

    for k = 1:max_iter
        % Check whether the Armijo (sufficient decrease) condition is satisfied
        if f(X + alpha * S) <= f_X + c * alpha * (S(:)' * grad(:))
            break;
        else
            alpha = rho * alpha; % Reduce the step size
        end
    end

    lambda = alpha; % Return the optimized step size
end
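
% The Armijo condition used above accepts a step alpha once
%   f(X + alpha*S) <= f(X) + c * alpha * grad_f(X)' * S,
% i.e. the decrease is at least a fraction c of that predicted by the linear model;
% otherwise alpha is shrunk geometrically by the factor rho.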

% Update Search Direction (Fletcher-Reeves formula)
function S_new = update_search_direction(S, J_new, J_old)
    beta = (norm(J_new(:))^2) / (norm(J_old(:))^2);
    S_new = -J_new + beta * S;
end
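
% A Polak-Ribiere alternative (used in the commented-out energy-functional version
% below) would instead set
%   beta = max(0, J_new(:)' * (J_new(:) - J_old(:)) / norm(J_old(:))^2);
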
%{
function [psi] = minimizeEnergyFunctional(this,psi,Params,Transf,VDk,V,Observ)

    format long;

    % Define the function handle
    f = @(X) this.Calculator.calculateTotalEnergy(X, Params, Transf, VDk, V)/Params.N;

    % Convergence Criteria:
    Epsilon = 1E-5;

    % Iteration Counter:
    i = 1;
    Observ.residual = 1;
    Observ.res = 1;

    % Initialize the PrematureExitFlag to false
    PrematureExitFlag = false;

    if this.PlotLive
        Plotter.plotLive(psi,Params,Transf,Observ)
        drawnow
    end

    % Minimization Loop
    while true
        % Compute gradient
        J = compute_gradient(psi, Params, Transf, VDk, V);

        % Check stopping criteria (gradient norm, then iteration budget)
        if norm(J(:)) < Epsilon
            disp('Tolerance reached: Gradient norm is below the specified epsilon.');
            % Converged: PrematureExitFlag stays false
            break;
        elseif i >= this.MaxIterationsForCGD
            disp('Maximum number of iterations for CGD reached.');
            PrematureExitFlag = true; % Flag a premature exit (no convergence)
            break;
        end

        % Initialize the search direction on the first iteration
        if i == 1
            S = -J;
        else
            % Update the search direction
            S = update_search_direction(S, J, J_old);
        end

        % Step Size Optimization (Line Search)
        alpha = optimize_step_size(f, psi, S, Params, Transf, VDk, V);

        % Update the solution
        psi = psi + alpha * S;

        % Normalize psi to the particle number N
        Norm = sum(abs(psi(:)).^2) * Transf.dx * Transf.dy * Transf.dz;
        psi = sqrt(Params.N) * psi / sqrt(Norm);

        % Store the old gradient
        J_old = J;
        i = i + 1;
        muchem = this.Calculator.calculateChemicalPotential(psi,Params,Transf,VDk,V);

        if mod(i,500) == 0
            % Change in Energy
            E = this.Calculator.calculateTotalEnergy(psi,Params,Transf,VDk,V);
            E = E/Norm;
            Observ.EVec = [Observ.EVec E];

            % Chemical potential
            Observ.mucVec = [Observ.mucVec muchem];

            % Normalized residuals
            res = this.Calculator.calculateNormalizedResiduals(psi,Params,Transf,VDk,V,muchem);
            Observ.residual = [Observ.residual res];
            Observ.res_idx = Observ.res_idx + 1;

            if this.PlotLive
                Plotter.plotLive(psi,Params,Transf,Observ)
                drawnow
            end

            save(sprintf(strcat(this.SaveDirectory, '/Run_%03i/psi_gs.mat'),Params.njob),'psi','muchem','Observ','Transf','Params','VDk','V');
        end
    end

    % Check whether the loop ended prematurely
    if PrematureExitFlag
        disp('Optimizer ended prematurely without convergence to a minimum.');
    else
        fprintf('Minimum found! Number of Iterations for Convergence: %d\n\n', i);
    end

    % Change in Energy
    E = this.Calculator.calculateTotalEnergy(psi,Params,Transf,VDk,V);
    E = E/Norm;
    Observ.EVec = [Observ.EVec E];

    disp('Saving data...');
    save(sprintf(strcat(this.SaveDirectory, '/Run_%03i/psi_gs.mat'),Params.njob),'psi','muchem','Observ','Transf','Params','VDk','V');
    disp('Save complete!');

end

%% Helper functions
% Gradient of the energy functional (applies the Hamiltonian operator to psi)
function J = compute_gradient(psi, Params, Transf, VDk, V)

    % Operators

    % Kinetic energy (3D spectral derivative)
    KEop = 0.5 * (Transf.KX.^2 + Transf.KY.^2 + Transf.KZ.^2);
    HKin = @(w) real(ifftn(KEop.*fftn(w)));

    % Trap Potential
    HV = @(w) V.*w;

    % Contact interactions
    Hint = @(w) (Params.gs*abs(psi).^2).*w;

    % DDIs
    frho = fftn(abs(psi).^2);
    Phi = real(ifftn(frho.*VDk));
    Hddi = @(w) (Params.gdd*Phi).*w;

    % Quantum fluctuations
    Hqf = @(w) (Params.gammaQF*abs(psi).^3).*w;

    H = @(w) HKin(w) + HV(w) + Hint(w) + Hddi(w) + Hqf(w);

    J = H(psi);

end
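
% Note: J = H(psi) is the gradient of the mean-field energy functional with respect
% to psi*, i.e. the extended Gross-Pitaevskii Hamiltonian (kinetic, trap, contact,
% dipolar and quantum-fluctuation terms) applied to psi.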

% Line Search (Step Size Optimization)
function alpha = optimize_step_size(f, X, S, Params, Transf, VDk, V)
    alpha = 1;      % Initial step size
    rho = 0.5;      % Step size reduction factor
    c = 1E-4;       % Armijo condition constant
    max_iter = 100; % Max iterations for backtracking
    tol = 1E-4;     % Tolerance for stopping

    grad = compute_gradient(X, Params, Transf, VDk, V); % Compute the gradient once
    f_X = f(X);                                         % Evaluate f(X) once

    for k = 1:max_iter
        % Evaluate the Armijo condition with the precomputed f(X) and gradient
        if f(X + alpha * S) <= f_X + c * alpha * (S(:)' * grad(:))
            break;
        else
            alpha = rho * alpha; % Reduce the step size
        end

        % Early stopping if the step size becomes too small
        if alpha < tol
            break;
        end
    end

end

% Update Search Direction
function S_new = update_search_direction(S, J_new, J_old)
    % (Fletcher-Reeves method)
    % beta = (norm(J_new(:))^2) / (norm(J_old(:))^2);
    % S_new = -J_new + beta * S;

    % (Polak-Ribiere method)
    beta = max(0, (J_new(:)' * (J_new(:) - J_old(:))) / (norm(J_old(:))^2));
    S_new = -J_new + beta * S;
end
%}