tsa_saxs/test.jl

154 lines
4.4 KiB
Julia
Raw Normal View History

2024-03-15 13:08:33 +01:00
import GLMakie as M
2024-05-15 15:28:21 +02:00
import GaussNewton as GN
2024-03-15 13:08:33 +01:00
import Metaheuristics as MH
import DelimitedFiles: readdlm
2024-05-15 15:28:21 +02:00
import LinearAlgebra: norm
2024-03-15 13:08:33 +01:00
include("PLUV.jl")
include("Utils.jl")
# filtered_idx = d[:, 2] .> 0.0
# @views q, I, err = d[filtered_idx, 1], d[filtered_idx, 2], d[filtered_idx, 3]
# Load run metadata, the initial parameter vector, box bounds, and the raw
# (q, I, err) data table from the TOML configuration file.
meta, params_init, lower_bounds, upper_bounds, qie_data = Utils.load_config("Data/PAR_POPC-test.toml")
2024-05-15 15:28:21 +02:00
# Parameter vector of the trial with the lowest χ² among the 5k TSA runs.
best_5k_full = begin
    # χ² of every trial; argmin on the matrix gives a CartesianIndex whose
    # first component is the (1-based) row/trial index.
    d_χ2 = open(readdlm, "POPC-test-5k/Results_collection-X2.dat", "r")
    idx_min_χ2 = argmin(d_χ2)[1]
    # Read only the matching line of the parameter collection; the do-block
    # form guarantees the file is closed (the original leaked the handle).
    best_params = open("POPC-test-5k/Results_collection.dat", "r") do f_params
        for _ in 1:(idx_min_χ2 - 1)
            readline(f_params)
        end
        # Default whitespace split tolerates repeated spaces between columns,
        # where split(…, ' ') would yield empty tokens and make parse throw.
        map(x -> parse(Float64, x), split(readline(f_params)))
    end
end
2024-05-15 15:28:21 +02:00
# Mean parameter vector over all 5k TSA trials (column-wise average of the
# trials × parameters matrix).
mean_5k_full = begin
    # do-block form closes the file; the original left the handle open.
    params_pop = open(readdlm, "POPC-test-5k/Results_collection.dat", "r")
    N_trials, _ = size(params_pop)
    mean_params = vec(sum(params_pop; dims=1) / N_trials)
end
# best_5k = PLUV.reduce_to_free_parameters(meta, [100000.0000000, 7.5004085, 23.5074449, 9.8664991, 0.0000000, 1.0000000, 0.2900000, 0.2203793,
# 0.2990402, 0.4122816, 0.3266636, 0.2276763, 0.2481895, 0.6642123, 0.1203572, 0.2629861, 0.9000000, 0.3050000,
# 0.2945315, 0.2762371, 0.4777401, 0.8100000, 2.0149998, 37.0000000, 0.0299139, 0.0002171])
#
# best_5k = PLUV.reduce_to_free_parameters(meta, [100000.0000000, 7.6975592, 23.4041912, 9.7275630, 0.0000000, 1.0000000, 0.2900000, 0.2219937, 0.3000114,
# 0.4158804, 0.3278631, 0.2296156, 0.2475607, 0.6664143, 0.1191859, 0.2618609, 0.9000000, 0.3050000, 0.2963982, 0.2770345, 0.4762528,
# 0.8100000, 1.9706651, 37.0000000, 0.0299179, 0.0002167])
2024-05-15 15:28:21 +02:00
# Free-parameter subset of the best 5k-trial vector.
# NOTE(review): `best_5k_params` is not referenced anywhere below — the
# plotting code uses the full vector `best_5k_full` instead; confirm intent.
best_5k_params = PLUV.reduce_to_free_parameters(meta, best_params)
2024-03-15 13:08:33 +01:00
# Binned (q, I, err) columns restricted to [q_min, q_max], and a
# full-resolution variant (binning = 1 for q) for plotting.
q, I_data, err = Utils.select_columns(qie_data, meta["q_min"], meta["q_max"], meta["binning"], meta["binning"])
q_all, I_all, err_all = Utils.select_columns(qie_data, meta["q_min"], meta["q_max"], 1, meta["binning"])
# Smoothed copies for display (σ=2 filter width — see Utils.lowpass_filter).
I_data_lp = Utils.lowpass_filter(I_data; σ=2)
I_all_lp = Utils.lowpass_filter(I_all; σ=2)
# Fit weights on a log scale in q — presumably compensating point density;
# confirm against Utils.compute_logscale_weights.
w = Utils.compute_logscale_weights(q)
w_all = Utils.compute_logscale_weights(q_all)
2024-05-15 15:28:21 +02:00
# I_init = PLUV.intensity(params_init, q)
2024-03-15 13:08:33 +01:00
# Specialize the intensity model to the free parameters declared in `meta`:
# returns a reduced model closure plus the matching initial vector and bounds.
intensity_reduced, P_reduced, lb_reduced, ub_reduced = PLUV.reduce_to_free_parameters(meta, params_init, lower_bounds, upper_bounds, q)
2024-05-15 15:28:21 +02:00
# Bounds as (lo, hi) tuples and a mid-point starting guess for Gauss-Newton.
simple_bounds = collect(zip(lb_reduced, ub_reduced))
simple_init = (lb_reduced .+ ub_reduced) ./ 2
# Log-barrier padding: loose (10×) for parameters 17–18, tight elsewhere.
padding_factor = fill(0.1, length(simple_init))
padding_factor[17:18] .= 10.0
2024-03-15 13:08:33 +01:00
# Box constraints in the format Metaheuristics expects.
bounds = MH.boxconstraints(lb=lb_reduced, ub=ub_reduced)

# Scalar χ² objective for the global optimizer.
# `neg_H20` is presumably a count of points with negative water content
# returned by the reduced model — confirm in PLUV. Nonzero counts inflate
# the objective multiplicatively to steer the search back to feasibility.
function obj_χ2(P)
    I_model, neg_H20 = intensity_reduced(P)
    χ2 = Utils.chi2(I_data, I_model, err)
    factor = (neg_H20 == 0 ? 1.0 : 5.0 * (neg_H20 + 1))
    return factor * χ2
end
# Residual-vector objective for Gauss-Newton, applying the same
# infeasibility penalty as the scalar χ² objective above.
function obj_residuals(P)
    I_model, neg_H20 = intensity_reduced(P)
    penalty = neg_H20 == 0 ? 1.0 : 5.0 * (neg_H20 + 1)
    return penalty * Utils.residuals(I_data, I_model, err)
end

# Wrap with log-barriers so the unconstrained GN solver respects the bounds.
barriered_obj = Utils.add_log_barriers(obj_residuals, simple_bounds; padding_factor=padding_factor)
2024-05-15 15:28:21 +02:00
2024-03-15 13:08:33 +01:00
# Metaheuristics optimizer configuration.
# NOTE(review): the original assigned `MH.Information(f_optimum=0.0)` and then
# immediately overwrote it with an empty Information(); only the latter ever
# took effect, so the dead assignment is dropped.
information = MH.Information()
options = MH.Options(f_calls_limit=10_000, f_tol=1e-5);
algorithm = MH.ECA(information=information, options=options)
#algorithm = MH.PSO()
2024-03-15 13:08:33 +01:00
2024-05-15 15:28:21 +02:00
# Model intensity on the binned q grid for the best 5k-trial parameter vector.
I_best_5k, _ = PLUV.intensity(best_5k_full, q)
2024-03-15 13:08:33 +01:00
2024-05-15 15:28:21 +02:00
# Model intensity on the full-resolution q grid for the mean 5k parameters.
I_mean_5k, _ = PLUV.intensity(mean_5k_full, q_all)
# Gauss-Newton
# Local Gauss-Newton refinement from the mid-point initial guess.
# Defines the globals `P_best` and `I_best` consumed by the plotting block;
# on non-convergence they are left undefined and only an error is logged.
if true
    _, result = GN.optimize(barriered_obj, simple_init)
    if !GN.has_converged(result)
        @error "Gauss-Newton did not converge"
    else
        P_best = result.minimizer
        I_best, _ = intensity_reduced(P_best)
    end
end
# Metaheuristics
2024-03-15 13:08:33 +01:00
# Global search with Metaheuristics (ECA); disabled by default.
if false
    # Fix: the original passed `obj`, which is undefined in this file — the
    # scalar objective defined above is `obj_χ2`.
    result = MH.optimize(obj_χ2, bounds, algorithm)
    @show MH.minimum(result)
    P_best = MH.minimizer(result)
    I_best, _ = intensity_reduced(P_best)
end
# Overlay the fitted model curves on the measured intensity.
if true
    fig = M.Figure()
    # Linear axes; switch to the commented line for a log-log view.
    #ax = M.Axis(fig[1, 1]; xscale=log10, yscale=log10)
    ax = M.Axis(fig[1, 1])
    M.lines!(ax, q, I_best, label="MH best (julia)")
    M.scatter!(ax, q, I_data, label="data")
    M.lines!(ax, q, I_best_5k, label="TSA best (5k)")
    M.lines!(ax, q_all, I_mean_5k, label="TSA mean (5k)")
    M.axislegend()
    display(fig)
end
# Diagnostic: raw vs. low-pass-filtered intensity on log-log axes; disabled.
if false
    fig = M.Figure()
    ax = M.Axis(fig[1, 1]; xscale=log10, yscale=log10)
    # Fix: the original plotted `I`, which is undefined here (only `norm` is
    # imported from LinearAlgebra); the binned data vector is `I_data`.
    M.scatter!(ax, q, I_data)
    M.lines!(ax, q_all, I_all)
    M.lines!(ax, q_all, I_all_lp)
    #M.lines!(ax, q, I_init)
    display(fig)
end
# Diagnostic: log-scale fit weights versus q; disabled by default.
if false
fig = M.Figure()
# Log-scaled x-axis only; the weights themselves stay on a linear axis.
ax = M.Axis(fig[1, 1]; xscale=log10)
#M.scatter!(ax, q, w)
M.scatter!(ax, q_all, w_all)
display(fig)
end