Added a new routine to simulate the distribution of data and compute its cumulants; additionally made minor modifications to the experimental data analysis routines.
parent e0136d4a19
commit 304758bebe
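In outline, the new simulation routine (added as the new file in the final hunk below) sweeps a control parameter alpha, draws synthetic samples at each value, and tracks the first six cumulants of the resulting distribution. A condensed, illustrative sketch of that flow follows; the Gaussian draw and the kappa_all container are placeholders, while the actual script uses skewed and bimodal draws and separate per-cumulant arrays:

% Condensed sketch of the simulation flow in the new script (illustrative only).
N_params     = 50;                                 % number of sweep points
alpha_values = linspace(0, 45, N_params);          % swept parameter (degrees)
kappa_all    = zeros(N_params, 6);                 % first six cumulants per sweep point

for i = 1:N_params
    data           = normrnd(1.0, 0.1, [500, 1]);  % placeholder for the skewed/bimodal draws
    kappa_all(i,:) = computeCumulants(data, 6);    % helper defined at the end of the new script
end

plot(alpha_values, kappa_all(:,2), 'o-');          % e.g. track kappa_2 against alpha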
@@ -160,8 +160,8 @@ for k = 1 : length(files)
(isempty(bkg_img) && isa(bkg_img, 'double')) || ...
(isempty(dark_img) && isa(dark_img, 'double'))

-refimages = nan(size(refimages)); % fill with NaNs
-absimages = nan(size(absimages));
+refimages(:,:,k) = nan(size(refimages(:,:,k))); % fill with NaNs
+absimages(:,:,k) = nan(size(absimages(:,:,k)));
else
refimages(:,:,k) = subtractBackgroundOffset(cropODImage(bkg_img, center, span), fraction)';
absimages(:,:,k) = subtractBackgroundOffset(cropODImage(calculateODImage(atm_img, bkg_img, dark_img, ImagingMode, PulseDuration), center, span), fraction)';
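The change in this hunk, and in the matching hunks below, narrows the NaN fallback from blanking the entire refimages/absimages stacks to blanking only the current shot k, so one shot with missing raw frames no longer invalidates every other shot. A minimal sketch of the pattern, assuming 3-D stacks indexed as (row, column, shot) with hypothetical sizes:

% Sketch of the per-shot NaN fallback adopted in this commit (illustrative sizes only).
refimages = zeros(64, 64, 10);            % hypothetical stack: rows x cols x shots
k = 3;                                    % shot whose raw images are missing/invalid

% Old behaviour: the whole stack was overwritten with NaNs.
% refimages = nan(size(refimages));

% New behaviour: only shot k is marked invalid; the other shots are preserved.
refimages(:,:,k) = nan(size(refimages(:,:,k)));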
@@ -90,8 +90,8 @@ for k = 1 : length(files)
(isempty(bkg_img) && isa(bkg_img, 'double')) || ...
(isempty(dark_img) && isa(dark_img, 'double'))

-refimages = nan(size(refimages)); % fill with NaNs
-absimages = nan(size(absimages));
+refimages(:,:,k) = nan(size(refimages(:,:,k))); % fill with NaNs
+absimages(:,:,k) = nan(size(absimages(:,:,k)));
else
refimages(:,:,k) = subtractBackgroundOffset(cropODImage(bkg_img, center, span), fraction)';
absimages(:,:,k) = subtractBackgroundOffset(cropODImage(calculateODImage(atm_img, bkg_img, dark_img, ImagingMode, PulseDuration), center, span), fraction)';
@@ -1,4 +1,4 @@
-%% ===== Settings =====
+%% ===== D-S Settings =====
groupList = ["/images/MOT_3D_Camera/in_situ_absorption", "/images/ODT_1_Axis_Camera/in_situ_absorption", ...
"/images/ODT_2_Axis_Camera/in_situ_absorption", "/images/Horizontal_Axis_Camera/in_situ_absorption", ...
"/images/Vertical_Axis_Camera/in_situ_absorption"];
@@ -45,28 +45,28 @@ zoom_size = 50; % Zoomed-in region around center
% Plotting and saving
scan_parameter = 'ps_rot_mag_fin_pol_angle';
% scan_parameter = 'rot_mag_field';
scan_parameter_text = 'Angle = ';
% scan_parameter_text = 'BField = ';

savefileName = 'DropletsToStripes';
font = 'Bahnschrift';

if strcmp(savefileName, 'DropletsToStripes')
-scan_groups = 0:5:45;
+scan_groups = 0:1:40;
titleString = 'Droplets to Stripes';
elseif strcmp(savefileName, 'StripesToDroplets')
-scan_groups = 45:-5:0;
+scan_groups = 40:-1:0;
titleString = 'Stripes to Droplets';
end

% Flags
-skipUnshuffling = true;
skipNormalization = true;
+skipUnshuffling = false;
skipPreprocessing = true;
skipMasking = true;
skipIntensityThresholding = true;
skipBinarization = true;
skipMovieRender = true;
-skipSaveFigures = true;
+skipSaveFigures = false;
skipSaveOD = true;

%% ===== Load and compute OD image, rotate and extract ROI for analysis =====
% Get a list of all files in the folder with the desired file name pattern.
@@ -90,15 +90,15 @@ for k = 1 : length(files)
(isempty(bkg_img) && isa(bkg_img, 'double')) || ...
(isempty(dark_img) && isa(dark_img, 'double'))

-refimages = nan(size(refimages)); % fill with NaNs
-absimages = nan(size(absimages));
+refimages(:,:,k) = nan(size(refimages(:,:,k))); % fill with NaNs
+absimages(:,:,k) = nan(size(absimages(:,:,k)));
else
refimages(:,:,k) = subtractBackgroundOffset(cropODImage(bkg_img, center, span), fraction)';
absimages(:,:,k) = subtractBackgroundOffset(cropODImage(calculateODImage(atm_img, bkg_img, dark_img, ImagingMode, PulseDuration), center, span), fraction)';
end
end

-%% ===== Fringe removal =====
+% ===== Fringe removal =====

if removeFringes
optrefimages = removefringesInImage(absimages, refimages);
@@ -183,18 +183,18 @@ for k = 1:N_shots
end

% Group by scan parameter values (e.g., alpha, angle, etc.)
-[unique_scan_parameter_values, ~, idx] = unique(scan_parameter_values);
-N_params = length(unique_scan_parameter_values);
+[unique_scan_parameter_values, ~, idx] = unique(scan_parameter_values);
+N_params = length(unique_scan_parameter_values);

% Define angular range and conversion
-angle_range = 180;
-angle_per_bin = angle_range / N_angular_bins;
-max_peak_angle = 180;
-max_peak_bin = round(max_peak_angle / angle_per_bin);
+angle_range = 180;
+angle_per_bin = angle_range / N_angular_bins;
+max_peak_angle = 180;
+max_peak_bin = round(max_peak_angle / angle_per_bin);

% Parameters for search
-window_size = 10;
-angle_threshold = 100;
+window_size = 10;
+angle_threshold = 100;

% Initialize containers for final results
mean_max_g2_values = zeros(1, N_params);
@@ -293,7 +293,7 @@ for val = scan_groups
continue;
end

-g2_vals = g2_all_per_group{i};
+g2_vals = max_g2_all_per_group{i};
g2_vals = g2_vals(~isnan(g2_vals));

if isempty(g2_vals)
@@ -336,7 +336,6 @@ for val = scan_groups
end
end
-

%% Plot all cumulants
figure(3)
set(gcf, 'Color', 'w', 'Position', [100 100 950 750])
@@ -0,0 +1,295 @@
%% Evolve to skewed to single gaussian with lower mean
clear; clc;

N_params = 50;
N_reps = 500;
alpha_values = linspace(0, 45, N_params);
all_data = cell(1, N_params);

% Transition control
alpha_start = 5; % where transition begins
alpha_end = 40; % where transition ends

mu_start = 1.2; % high initial mean
mu_end = 0.8; % low final mean

sigma_start = 0.2; % wide initial std
sigma_end = 0.07; % narrow final std

max_skew = 5; % peak skew strength

% Loop through alpha
for i = 1:N_params
    alpha = alpha_values(i);

    % Normalized transition variable t ∈ [0, 1]
    t = min(max((alpha - alpha_start) / (alpha_end - alpha_start), 0), 1);

    % Use cosine-based smooth interpolation
    smooth_t = (1 - cos(pi * t)) / 2; % ease-in-out

    % Mean and sigma interpolation (smoothly decrease)
    mu = mu_start * (1 - smooth_t) + mu_end * smooth_t;
    sigma = sigma_start * (1 - smooth_t) + sigma_end * smooth_t;

    % Skewness: sinusoidal profile, max at middle
    skew_strength = max_skew * sin(t * pi);

    % Generate data
    if abs(skew_strength) < 1e-2 % near-zero skew, use normal
        data = normrnd(mu, sigma, [N_reps, 1]);
    else
        data = skewnormrnd(mu, sigma, skew_strength, N_reps);
    end

    all_data{i} = data;

    % Cumulants
    kappa = computeCumulants(data, 6);
    mean_vals(i) = kappa(1);
    var_vals(i) = kappa(2);
    skew_vals(i) = kappa(3);
    kurt_vals(i) = kappa(4);
    kappa5_vals(i) = kappa(5);
    kappa6_vals(i) = kappa(6);
end

%% Evolve to bimodal
clear; clc;

N_params = 50;
N_reps = 500;
alpha_values = linspace(0, 45, N_params);

all_data = cell(1, N_params);

bimodal_start = 20; % start earlier
transition_width = 5; % wider window for smoothness

for i = 1:N_params
    alpha = alpha_values(i);

    if alpha < (bimodal_start - transition_width)
        % Pure skewed unimodal, smaller max skewness for subtlety
        skew_strength = 3 * (alpha / bimodal_start);
        data = skewnormrnd(1, 0.1, skew_strength, N_reps);

    elseif alpha <= (bimodal_start + transition_width)
        % Smooth transition window
        t = (alpha - (bimodal_start - transition_width)) / (2 * transition_width);

        % Weights transition 1 -> 0.7
        w1 = 1 - 0.3 * t;
        w2 = 1 - w1;

        % Peak separation smaller (max delta = 0.3)
        delta_max = 0.3;
        delta = delta_max * t;

        mu1 = 1 - delta;
        mu2 = 1 + delta;

        sigma1 = 0.1;
        sigma2 = 0.1;

        N1 = round(N_reps * w1);
        N2 = N_reps - N1;

        mode1_samples = normrnd(mu1, sigma1, [N1, 1]);
        mode2_samples = normrnd(mu2, sigma2, [N2, 1]);

        data = [mode1_samples; mode2_samples];
        data = data(randperm(length(data)));

    else
        % After transition: bimodal, but not strongly balanced
        w1 = 0.7;
        w2 = 0.3;

        mu1 = 1 - 0.3;
        mu2 = 1 + 0.3;

        sigma1 = 0.1;
        sigma2 = 0.1;

        N1 = round(N_reps * w1);
        N2 = N_reps - N1;

        mode1_samples = normrnd(mu1, sigma1, [N1, 1]);
        mode2_samples = normrnd(mu2, sigma2, [N2, 1]);

        data = [mode1_samples; mode2_samples];
        data = data(randperm(length(data)));
    end

    all_data{i} = data;
    kappa = computeCumulants(data,6);
    mean_vals(i) = kappa(1);
    var_vals(i) = kappa(2);
    skew_vals(i) = kappa(3);
    kurt_vals(i) = kappa(4);
    kappa5_vals(i) = kappa(5);
    kappa6_vals(i) = kappa(6);
end

%% Animate evolving distribution and cumulant value
figure(1); clf;
set(gcf, 'Color', 'w', 'Position',[100 100 1300 750])

for i = 1:N_params
    clf;

    % PDF
    subplot(1,2,1); cla; hold on;
    data = all_data{i};

    % Plot histogram with normalized PDF
    histogram(data, 'Normalization', 'pdf', 'BinWidth', 0.03, ...
        'FaceColor', [0.3 0.5 0.8], 'EdgeColor', 'k', 'FaceAlpha', 0.6);

    title(sprintf('Histogram at $\\alpha = %.1f^\\circ$', alpha_values(i)), ...
        'Interpreter', 'latex', 'FontSize', 16);
    xlabel('$\mathrm{max}[g^{(2)}]$', 'Interpreter', 'latex', 'FontSize', 14);
    ylabel('PDF', 'FontSize', 14);
    set(gca, 'FontSize', 12); grid on;
    xlim([0.5, 1.8]);

    % Cumulant evolution (e.g., Variance)
    subplot(1,2,2); hold on;
    plot(alpha_values(1:i), var_vals(1:i), 'bo-', 'LineWidth', 2);
    title('Cumulant Tracking', 'Interpreter', 'latex', 'FontSize', 16);
    xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', 14);
    ylabel('$\kappa_2$', 'Interpreter', 'latex', 'FontSize', 14);
    xlim([0, 45]); grid on;
    set(gca, 'FontSize', 12);

    pause(0.3);
end

%% === Compute 2D PDF heatmap: f(x, alpha) ===
x_grid = linspace(0.5, 1.8, 200); % max[g²] values on y-axis
pdf_matrix = zeros(numel(x_grid), N_params); % Now: rows = y, columns = alpha

for i = 1:N_params
    data = all_data{i};
    f = ksdensity(data, x_grid, 'Bandwidth', 0.025);
    pdf_matrix(:, i) = f; % Transpose for y-axis to be vertical
end

% === Plot PDF vs. alpha heatmap ===
figure(2); clf;
set(gcf, 'Color', 'w', 'Position',[100 100 950 750])

imagesc(alpha_values, x_grid, pdf_matrix);
set(gca, 'YDir', 'normal'); % Flip y-axis to normal orientation
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', 14);
ylabel('$\mathrm{max}[g^{(2)}]$', 'Interpreter', 'latex', 'FontSize', 14);
title('Evolving PDF of $\mathrm{max}[g^{(2)}]$', ...
    'Interpreter', 'latex', 'FontSize', 16);

colormap(Colormaps.coolwarm()); % More aesthetic than default
colorbar;
c = colorbar;
ylabel(c, 'PDF', 'FontSize', 14, 'Interpreter', 'latex');
set(gca, 'FontSize', 14);

%% === Plotting ===
figure(1)
set(gcf, 'Color', 'w', 'Position', [100 100 950 750])

scan_vals = alpha_values; % your parameter sweep values

% Define font style for consistency
axis_fontsize = 14;
label_fontsize = 16;
title_fontsize = 16;

% 1. Mean with error bars (if you have error data, else just plot)
% If no error, replace errorbar with plot or omit error data
% For now, no error bars assumed
subplot(3,2,1);
plot(scan_vals, mean_vals, 'o-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Mean of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_1$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% 2. Variance
subplot(3,2,2);
plot(scan_vals, var_vals, 's-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Variance of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_2$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% 3. Skewness
subplot(3,2,3);
plot(scan_vals, skew_vals, 'd-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Skewness of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_3$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% 4. Kurtosis
subplot(3,2,4);
plot(scan_vals, kurt_vals, '^-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Kurtosis of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_4$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% 5. 5th-order cumulant
subplot(3,2,5);
plot(scan_vals, kappa5_vals, 'v-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Fifth-order Cumulant of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_5$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% 6. 6th-order cumulant
subplot(3,2,6);
plot(scan_vals, kappa6_vals, '>-', 'LineWidth', 1.5, 'MarkerSize', 6);
title('Sixth-order Cumulant of Distribution', 'FontSize', title_fontsize, 'Interpreter', 'latex');
xlabel('$\alpha$ (degrees)', 'Interpreter', 'latex', 'FontSize', label_fontsize);
ylabel('$\kappa_6$', 'Interpreter', 'latex', 'FontSize', label_fontsize);
set(gca, 'FontSize', axis_fontsize);
grid on;

% Super title (you can customize the string)
sgtitle('Cumulants of a simulated evolving distribution', ...
    'FontWeight', 'bold', 'FontSize', 18, 'Interpreter', 'latex');

%% === Helper: Cumulant Calculation ===
function kappa = computeCumulants(data, max_order)
    data = data(:);
    mu = mean(data);
    c = zeros(1, max_order);
    centered = data - mu;
    for n = 1:max_order
        c(n) = mean(centered.^n);
    end
    kappa = zeros(1, max_order);
    kappa(1) = mu;
    kappa(2) = c(2);
    kappa(3) = c(3);
    kappa(4) = c(4) - 3*c(2)^2;
    kappa(5) = c(5) - 10*c(3)*c(2);
    kappa(6) = c(6) - 15*c(4)*c(2) - 10*c(3)^2 + 30*c(2)^3;
end

%% === Helper: Skewed Normal Distribution ===
function x = skewnormrnd(mu, sigma, alpha, n)
    % Skew-normal using Azzalini's method
    delta = alpha / sqrt(1 + alpha^2);
    u0 = randn(n,1);
    v = randn(n,1);
    u1 = delta * u0 + sqrt(1 - delta^2) * v;
    x = mu + sigma * u1 .* sign(u0);
end
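For reference, computeCumulants converts raw central moments into cumulants using the standard relations (with $c_n$ the $n$-th central moment of the sample and $\mu$ its mean):

$$\kappa_1 = \mu, \qquad \kappa_2 = c_2, \qquad \kappa_3 = c_3, \qquad \kappa_4 = c_4 - 3c_2^2,$$
$$\kappa_5 = c_5 - 10\,c_3 c_2, \qquad \kappa_6 = c_6 - 15\,c_4 c_2 - 10\,c_3^2 + 30\,c_2^3.$$

These are the moment-based (biased) estimators rather than unbiased k-statistics, which is adequate for the qualitative tracking done here. Likewise, skewnormrnd follows the usual Azzalini construction: with $\delta = \alpha/\sqrt{1+\alpha^2}$, the returned samples are distributed as $\mu + \sigma\,(\delta\,|U_0| + \sqrt{1-\delta^2}\,V)$ for independent standard normals $U_0, V$, so mu and sigma act as location and scale parameters; the sample mean of a skewed draw sits near $\mu + \sigma\delta\sqrt{2/\pi}$, not exactly at $\mu$.

A quick sanity check of the two helpers, assuming they are on the MATLAB path (e.g. saved as their own function files):

% Hypothetical sanity check for computeCumulants and skewnormrnd (not in the commit; illustrative only).
rng(1);                                   % reproducible draws
g  = normrnd(0, 1, [1e5, 1]);             % Gaussian: kappa_3..kappa_6 should be ~0
kg = computeCumulants(g, 6);
fprintf('Gaussian:    kappa = %s\n', mat2str(kg, 3));

s  = skewnormrnd(0, 1, 5, 1e5);           % positive alpha -> positive kappa_3
ks = computeCumulants(s, 6);
fprintf('Skew-normal: kappa = %s\n', mat2str(ks, 3));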