# tsa_saxs/test.jl
import GLMakie as M
import GaussNewton as GN
import Metaheuristics as MH
import DelimitedFiles: readdlm
import LinearAlgebra: norm
include("PLUV.jl")
include("Utils.jl")
# filtered_idx = d[:, 2] .> 0.0
# @views q, I, err = d[filtered_idx, 1], d[filtered_idx, 2], d[filtered_idx, 3]
meta, params_init, lower_bounds, upper_bounds, qie_data = Utils.load_config("Data/PAR_POPC-test.toml")
param_names = keys(params_init)
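# `load_config` appears to return the run metadata (q range, binning, fixed/free
# parameter flags), the initial parameter set with its bounds, and the raw
# (q, I, err) data table.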
# Best trial of the 5k-trial simulated-annealing (TSA) run: locate the row with
# the minimal χ², then read the matching parameter line from the results file.
best_5k_full = begin
    d_χ2 = readdlm("POPC-test-5k/Results_collection-X2.dat")
    idx_min_χ2 = argmin(d_χ2)[1]
    best_params = open("POPC-test-5k/Results_collection.dat", "r") do f
        for _ in 1:(idx_min_χ2 - 1)
            readline(f)  # skip the trials preceding the best one
        end
        # split on any run of whitespace; splitting on a single ' ' yields
        # empty tokens when spaces are repeated
        map(x -> parse(Float64, x), split(readline(f)))
    end
end
# Mean parameter vector over all 5k trials.
mean_5k_full = begin
    params_pop = readdlm("POPC-test-5k/Results_collection.dat")
    N_trials, _ = size(params_pop)
    vec(sum(params_pop; dims=1) ./ N_trials)
end
# best_5k = PLUV.reduce_to_free_parameters(meta, [100000.0000000, 7.5004085, 23.5074449, 9.8664991, 0.0000000, 1.0000000, 0.2900000, 0.2203793,
# 0.2990402, 0.4122816, 0.3266636, 0.2276763, 0.2481895, 0.6642123, 0.1203572, 0.2629861, 0.9000000, 0.3050000,
# 0.2945315, 0.2762371, 0.4777401, 0.8100000, 2.0149998, 37.0000000, 0.0299139, 0.0002171])
#
# best_5k = PLUV.reduce_to_free_parameters(meta, [100000.0000000, 7.6975592, 23.4041912, 9.7275630, 0.0000000, 1.0000000, 0.2900000, 0.2219937, 0.3000114,
# 0.4158804, 0.3278631, 0.2296156, 0.2475607, 0.6664143, 0.1191859, 0.2618609, 0.9000000, 0.3050000, 0.2963982, 0.2770345, 0.4762528,
# 0.8100000, 1.9706651, 37.0000000, 0.0299179, 0.0002167])
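# The two commented-out vectors above are hard-coded parameter sets kept from
# earlier reference runs; the line below derives the best set from disk instead.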
best_5k_params = PLUV.reduce_to_free_parameters(meta, best_params)
# Select the (q, I, err) columns inside [q_min, q_max]. The first call bins the
# data; the second passes a bin width of 1, presumably to keep every q point so
# that model curves can be drawn on the dense grid.
q, I_data, err = Utils.select_columns(qie_data, meta["q_min"], meta["q_max"], meta["binning"], meta["binning"])
q_all, I_all, err_all = Utils.select_columns(qie_data, meta["q_min"], meta["q_max"], 1, meta["binning"])
# Gaussian low-pass smoothing of the measured intensities.
I_data_lp = Utils.lowpass_filter(I_data; σ=2)
I_all_lp = Utils.lowpass_filter(I_all; σ=2)
# Weights that compensate for the uneven point density on a logarithmic q axis.
w = Utils.compute_logscale_weights(q)
w_all = Utils.compute_logscale_weights(q_all)
# I_init = PLUV.intensity(params_init, q)
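# The five-argument method appears to fix the parameters flagged as constant in
# `meta` and return a model closure over the remaining free parameters, together
# with the matching reduced initial values and bounds.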
intensity_reduced, P_reduced, lb_reduced, ub_reduced = PLUV.reduce_to_free_parameters(meta, params_init, lower_bounds, upper_bounds, q)
simple_bounds = collect(zip(lb_reduced, ub_reduced))
simple_init = 0.5 * (lb_reduced .+ ub_reduced)
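# Per-parameter padding for the log barriers below; 1e-1 presumably controls how
# far outside the box the barrier singularities are placed.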
padding_factor = fill(1e-1, length(simple_init))
bounds = MH.boxconstraints(lb=lb_reduced, ub=ub_reduced)
# Scalar objective: χ² of the model against the binned data, inflated by a
# factor 5(neg_H20 + 1) whenever the model reports a nonzero `neg_H20` count
# (presumably flagging unphysical negative water content), to steer optimizers
# away from such regions.
function obj_χ2(P)
    I_model, neg_H20 = intensity_reduced(P)
    χ2 = Utils.chi2(I_data, I_model, err)
    factor = (neg_H20 == 0 ? 1.0 : 5.0 * (neg_H20 + 1))
    return factor * χ2
end
# Vector-valued counterpart returning weighted residuals, as Gauss-Newton needs.
function obj_residuals(P)
    I_model, neg_H20 = intensity_reduced(P)
    residuals = Utils.residuals(I_data, I_model, err)
    factor = (neg_H20 == 0 ? 1.0 : 5.0 * (neg_H20 + 1))
    return factor * residuals
end
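# Assuming `Utils.residuals` returns rᵢ = (I_dataᵢ − I_modelᵢ)/errᵢ, the two
# objectives are consistent: χ² = Σᵢ rᵢ², so minimizing the residual norm
# minimizes χ².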
barriered_obj = Utils.add_log_barriers(obj_residuals, simple_bounds; padding_factor=padding_factor, mode=:outer)
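# A log-barrier wrapper of this kind typically adds terms like
# −log(P − lb) − log(ub − P), here with the singularities padded outside the box
# (`mode=:outer`), so the unconstrained Gauss-Newton iterates stay within bounds.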
# information = MH.Information(f_optimum=0.0)  # alternative: declare the known optimum
information = MH.Information()
options = MH.Options(f_calls_limit=10_000, f_tol=1e-5)
algorithm = MH.ECA(information=information, options=options)
# algorithm = MH.PSO()
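# Reference intensity curves from the 5k TSA run, evaluated on the binned and
# the full q grids for the comparison plot below.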
I_best_5k, _ = PLUV.intensity(best_5k_full, q)
I_mean_5k, _ = PLUV.intensity(mean_5k_full, q_all)
# Gauss-Newton refinement, starting from the best TSA parameters
if true
    # initial_guess = PLUV.reduce_to_free_parameters(meta, collect(Float64, values(params_init)))
    initial_guess = best_5k_params
    _, result = GN.optimize(barriered_obj, initial_guess; show_trace=true, iscale=1, ZCP=1e-2, ZCPMIN=1e-2)
    # _, result = GN.optimize(barriered_obj, initial_guess)
    if GN.has_converged(result)
        @info "Gauss-Newton converged"
        @show result
        P_best = result.minimizer
        I_best, _ = intensity_reduced(P_best)
        @info "Simulated Annealing 5k"
        Utils.print_check_bounds(param_names, best_5k_params, lb_reduced, ub_reduced)
        @info "Our result"
        Utils.print_check_bounds(param_names, P_best, lb_reduced, ub_reduced)
    else
        @error "Gauss-Newton did not converge"
    end
end
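# Optional: persist the refined parameters for later runs (`writedlm` is part of
# DelimitedFiles; the file name here is an arbitrary, hypothetical choice):
# import DelimitedFiles: writedlm
# writedlm("POPC-test-5k/P_best-refined.dat", P_best)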
# Metaheuristics (disabled): global ECA search over the box constraints
if false
    result = MH.optimize(obj_χ2, bounds, algorithm)
    @show MH.minimum(result)
    P_best = MH.minimizer(result)
    I_best, _ = intensity_reduced(P_best)
end
# Comparison plot: fitted model vs. data and the TSA reference curves
if true
    fig = M.Figure()
    ax = M.Axis(fig[1, 1]; xscale=log10, yscale=log10)
    # ax = M.Axis(fig[1, 1])
    M.lines!(ax, q, I_best, label="best fit (julia)")
    M.scatter!(ax, q, I_data, label="data")
    M.lines!(ax, q, I_best_5k, label="TSA best (5k)")
    M.lines!(ax, q_all, I_mean_5k, label="TSA mean (5k)")
    M.axislegend()
    display(fig)
end
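# Optional: save the comparison figure (Makie's `save` works with the GLMakie
# backend; the file name is an arbitrary choice):
# M.save("fit_comparison.png", fig)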
# Diagnostic plot (disabled): raw vs. low-pass-filtered intensities
if false
    fig = M.Figure()
    ax = M.Axis(fig[1, 1]; xscale=log10, yscale=log10)
    M.scatter!(ax, q, I_data)
    M.lines!(ax, q_all, I_all)
    M.lines!(ax, q_all, I_all_lp)
    # M.lines!(ax, q, I_init)
    display(fig)
end
# Diagnostic plot (disabled): log-scale weights across q
if false
    fig = M.Figure()
    ax = M.Axis(fig[1, 1]; xscale=log10)
    # M.scatter!(ax, q, w)
    M.scatter!(ax, q_all, w_all)
    display(fig)
end