From 9df37aed56affcd39b1267abd0765ef5b770db4f Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Tue, 19 Dec 2023 18:44:45 +0000 Subject: [PATCH 01/11] =?UTF-8?q?speed=20improvement=20from=20272=C2=B5s?= =?UTF-8?q?=20to=20640ns?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/likelihood.jl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/likelihood.jl b/src/likelihood.jl index 1fd6d83..4ca5bf9 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -71,8 +71,15 @@ end #Applies location and scale parameter to source signals (per generalized Gaussian) @views function calculate_y!(myAmica::SingleModelAmica) + #for j in 1:myAmica.m + # myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals .- myAmica.learnedParameters.location[j,:]) + #end for j in 1:myAmica.m - myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals[:,:] .- myAmica.learnedParameters.location[j,:]) + for k = 1: size(myAmica.m,1) + for i = 1:size(myAmica.m,2) + myAmica.y[k,i,j] = sqrt(myAmica.learnedParameters.scale[j,k]) * (myAmica.source_signals[k,i] - myAmica.learnedParameters.location[j,k]) + end + end end end From 362950053ef4b797be7de720cea28e43a97b0237 Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Tue, 19 Dec 2023 18:58:33 +0000 Subject: [PATCH 02/11] insane additional 100x speed improvement in calculate_y --- src/types.jl | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/types.jl b/src/types.jl index b616aba..be17ff5 100755 --- a/src/types.jl +++ b/src/types.jl @@ -11,27 +11,28 @@ abstract type AbstractAmica end mutable struct SingleModelAmica{T} <:AbstractAmica source_signals::Array{T,2} learnedParameters::GGParameters{T} - m::Union{Integer, Nothing} #Number of gaussians + m::Int #Number of gaussians A::Array{T,2} # unmixing matrices for each model S::Array{T,2} # sphering matrix z::Array{T,3} y::Array{T,3} - centers::Array{T} #model centers - Lt::Array{Float64} #log likelihood of time point for each model ( M x N ) - LL::Array{T} #log likelihood over iterations todo: change to tuple - ldet::Float64 + centers::Array{T,1} #model centers + Lt::Array{T,1} #log likelihood of time point for each model ( M x N ) + LL::Array{T,1} #log likelihood over iterations todo: change to tuple + ldet::T maxiter::Int end -mutable struct MultiModelAmica <:AbstractAmica - models::Array{SingleModelAmica} #Array of SingleModelAmicas + +mutable struct MultiModelAmica{T} <:AbstractAmica + models::Array{SingleModelAmica{T}} #Array of SingleModelAmicas normalized_ica_weights #Model weights (normalized) ica_weights_per_sample #Model weight for each sample ica_weights #Model weight for all samples maxiter::Int #Number of iterations m::Int #Number of Gaussians - LL::AbstractVector #Log-Likelihood + LL::Array{T,1} #Log-Likelihood end #Structure for Learning Rate type with initial value, minumum, maximum etc. 
Used for learning rate and shape lrate

From 16a39f6530c0b4c1c3e860bbcc542d08574461df Mon Sep 17 00:00:00 2001
From: "behinger (s-ccs 001)"
Date: Tue, 19 Dec 2023 18:58:55 +0000
Subject: [PATCH 03/11] this should work, but haven't tested - oops

---
 src/likelihood.jl | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/src/likelihood.jl b/src/likelihood.jl
index 4ca5bf9..d133f37 100755
--- a/src/likelihood.jl
+++ b/src/likelihood.jl
@@ -83,15 +83,8 @@ end
     end
 end
 
-@views function calculate_y!(myAmica::MultiModelAmica)
-    n = size(myAmica.models[1].A,1)
-    for h in 1:size(myAmica.models,1)
-        #=Threads.@threads=# for i in 1:n
-            for j in 1:myAmica.m
-                myAmica.models[h].y[i,:,j] = sqrt(myAmica.models[h].learnedParameters.scale[j,i]) * (myAmica.models[h].source_signals[i,:] .- myAmica.models[h].learnedParameters.location[j,i])
-            end
-        end
-    end
+function calculate_y!(myAmica::MultiModelAmica)
+    calculate_y!.(myAmica.models)
 end
 
 #Calculates Likelihood for each time sample and for each ICA model

From df72b27741281d25d982b583013b79600d6ba907 Mon Sep 17 00:00:00 2001
From: "behinger (s-ccs 001)"
Date: Tue, 19 Dec 2023 19:16:10 +0000
Subject: =?UTF-8?q?added=20missing=20broadcast=20to=20first?=
 =?UTF-8?q?=20minus,=20340=20to=20230=20=C2=B5s?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/helper.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/helper.jl b/src/helper.jl
index b9db918..e323293 100755
--- a/src/helper.jl
+++ b/src/helper.jl
@@ -35,7 +35,7 @@ end
 #taken from amica_a.m
 #L = det(A) * mult p(s|θ)
 function logpfun(rho, y_rho)
-    return @inbounds - y_rho .- log(2) .- loggamma(1 + 1 / rho)
+    return .- y_rho .- log(2) .- loggamma(1 + 1 / rho)
 end
 

From fe603080c94374e00a9e0cc92547d381d84ca16e Mon Sep 17 00:00:00 2001
From: "behinger (s-ccs 001)"
Date: Tue, 19 Dec 2023 19:59:19 +0000
Subject: [PATCH 05/11] actually 10% slower, but fewer allocs; try using it again later?
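
The devectorized path trades roughly 10% speed for far fewer allocations: Q is filled in place, and the mixture responsibilities are written directly into z with sum! instead of materializing 1 ./ sum(optimized_exp(Q .- Q[j,:]'), dims=1) on every pass. A minimal standalone sketch of that pattern (function and buffer names are illustrative, not the package API):

function responsibilities!(z, Q, tmp)
    # z: N x m output, Q: m x N per-mixture log-densities, tmp: m x N scratch
    m = size(Q, 1)
    for j in 1:m
        tmp .= exp.(Q .- @view(Q[j, :])')  # exp(Q[k, t] - Q[j, t]); the j-th row is exactly 1
        sum!(@view(z[:, j])', tmp)         # column sums, written in place into z[:, j]
    end
    z .= 1 ./ z                            # z[t, j] = exp(Q[j, t]) / sum_k exp(Q[k, t]), a softmax
    return z
end

With z = similar(Q') and tmp = similar(Q) allocated once outside the component loop, the per-iteration allocation count drops essentially to zero.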
--- src/likelihood.jl | 35 +++++++++++++++++++++++++++++------ 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/src/likelihood.jl b/src/likelihood.jl index d133f37..faa939f 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -39,7 +39,7 @@ function loopiloop!(myAmica::MultiModelAmica, y_rho) (n,_) = size(myAmica.models[1].source_signals) for h in 1:M #run along models - Threads.@threads for i in 1:n #run along components + for i in 1:n #run along components Q = calculate_Q(myAmica.models[h], i, y_rho) calculate_u!(myAmica.models[h], @view(Q[:, n, :]),i) calculate_Lt!(myAmica.models[h], @view(Q[:, n, :])) @@ -47,10 +47,11 @@ function loopiloop!(myAmica::MultiModelAmica, y_rho) end end -@views function calculate_Q(myAmica::SingleModelAmica, i, y_rho) + +@views function calculate_Q!(myAmica::SingleModelAmica, Q,i, y_rho) (n,N) = size(myAmica.source_signals) m = myAmica.m - Q = zeros(m,N) + #Q = zeros(m,N) for j in 1:m Q[j,:] .= log(myAmica.learnedParameters.proportions[j,i]) + 0.5 * log(myAmica.learnedParameters.scale[j,i]) .+ logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, :, j]) @@ -59,14 +60,36 @@ end return Q end +function calculate_Q(myAmica::SingleModelAmica, i, y_rho) + (n,N) = size(myAmica.source_signals) + m = myAmica.m + Q = zeros(m,N) + + for k in 1:n + for j in 1:m + Q[j,k] = log(myAmica.learnedParameters.proportions[j,i]) + + 0.5 * log(myAmica.learnedParameters.scale[j,i]) + + logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, k, j]) + end + end + + return Q +end + #calculates u but saves it into z. MultiModel also uses the SingleModel version -@views function calculate_u!(myAmica::SingleModelAmica, Q, i) + function calculate_u!(myAmica::SingleModelAmica, Q, i) m = size(myAmica.learnedParameters.scale,1) + #scratch_Q = similar(@view(Q[1,:])) + #scratch_Q = similar(Q) if m > 1 - for j in 1:m - myAmica.z[i, :, j] .= (1 ./ sum(optimized_exp(Q .- Q[j, :]'), dims=1))[:] + # i is component/channel + for j in 1:m # 1:3 m mixtures + # I think it is very hard to optimize the Q .- Q[j,:] inner term + sum!( @view(myAmica.z[i,:,j]),optimized_exp(Q .- @view(Q[j, :])')') + end end + myAmica.z[i,:,:] .= 1 ./ @view(myAmica.z[i,:,:]) end #Applies location and scale parameter to source signals (per generalized Gaussian) From 3bb8a134451aa22f7006acc9809da563c1246c21 Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Tue, 19 Dec 2023 20:32:47 +0000 Subject: [PATCH 06/11] 15% or so improvement, much better on allocations --- src/likelihood.jl | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/likelihood.jl b/src/likelihood.jl index d133f37..18bc1dd 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -91,10 +91,14 @@ end function calculate_Lt!(myAmica::SingleModelAmica,Q) m = size(myAmica.learnedParameters.scale,1) if m > 1 - Qmax = ones(m,1).*maximum(Q,dims=1); - myAmica.Lt[:] = myAmica.Lt' .+ Qmax[1,:]' .+ logsumexp(Q - Qmax,dims = 1) + Qmax = maximum(Q,dims=1); + #@show size(Q), size(Qmax) + Q .= Q .- Qmax + logsumexp!(@view(Qmax[1,:]),Q') + myAmica.Lt .= myAmica.Lt .+ @view(Qmax[1,:]) + else - myAmica.Lt[:] = myAmica.Lt .+ Q[1,:]#todo: test + myAmica.Lt[:] = myAmica.Lt .+ Q[1,:] #todo: test end end From fc2c85dfef4ae67a189cfbf99aab5bb89fd86a78 Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Tue, 19 Dec 2023 20:52:48 +0000 Subject: [PATCH 07/11] tried to pull out generation of Q --- src/likelihood.jl | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/src/likelihood.jl 
b/src/likelihood.jl index 18bc1dd..16eead9 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -24,12 +24,14 @@ function calculate_LL!(myAmica::MultiModelAmica) end #Update loop for Lt and u (which is saved in z). Todo: Rename -function loopiloop!(myAmica::SingleModelAmica, y_rho) - (n,_) = size(myAmica.source_signals) - - for i in 1:n - Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q - calculate_u!(myAmica,Q,i) # myAmica.z +function loopiloop!(myAmica::SingleModelAmica{T}, y_rho) where {T} + (ncomponents,N) = size(myAmica.source_signals) + Q = Array{T}(undef,myAmica.m,N) + gg = myAmica.learnedParameters + for comps in 1:ncomponents + @views calculate_Q!(Q,gg.proportions[:,comps],gg.scale[:,comps],gg.shape[:,comps],y_rho[comps,:,:]) + #Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q + calculate_u!(myAmica,Q,comps) # myAmica.z calculate_Lt!(myAmica,Q) # myAmica.Q end end @@ -47,13 +49,17 @@ function loopiloop!(myAmica::MultiModelAmica, y_rho) end end -@views function calculate_Q(myAmica::SingleModelAmica, i, y_rho) +function calculate_Q(myAmica::SingleModelAmica, i, y_rho) (n,N) = size(myAmica.source_signals) m = myAmica.m Q = zeros(m,N) - - for j in 1:m - Q[j,:] .= log(myAmica.learnedParameters.proportions[j,i]) + 0.5 * log(myAmica.learnedParameters.scale[j,i]) .+ logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, :, j]) + gg = myAmica.learnedParameters + @views calculate_Q!(Q,gg.proportions[:,i],gg.scale[:,i],gg.shape[:,i],y_rho[i,:,:]) + return Q +end + function calculate_Q!(Q,proportions,scale,shape,y_rho) + for j in 1:length(proportions) + Q[j,:] .= log(proportions[j]) + 0.5 * log(scale[j]) .+ logpfun(shape[j], y_rho[ :, j]) end return Q From c8ba5fb77553cf09e1252e28b694dd6348aea82f Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Tue, 19 Dec 2023 22:09:12 +0000 Subject: [PATCH 08/11] improved y_rho, removed experimental exp/pow --- src/helper.jl | 24 ++++++++++++------------ src/main.jl | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/helper.jl b/src/helper.jl index e323293..16e3020 100755 --- a/src/helper.jl +++ b/src/helper.jl @@ -50,13 +50,13 @@ function optimized_pow(lhs::AbstractArray{T, 1}, rhs::T) where {T<:Real} end function optimized_pow(lhs::AbstractArray{T, 1}, rhs::AbstractArray{T, 1}) where {T<:Real} - if Sys.iswindows() || Sys.islinux() - return IVM.pow(lhs, rhs) - elseif Sys.isapple() - return AppleAccelerate.pow(lhs, rhs) - else +# if Sys.iswindows() || Sys.islinux() +# return IVM.pow(lhs, rhs) +# elseif Sys.isapple() +# return AppleAccelerate.pow(lhs, rhs) +# else return lhs .^ rhs - end +# end end function optimized_log(val) @@ -71,11 +71,11 @@ end function optimized_exp(val) - if Sys.iswindows() || Sys.islinux() - return IVM.exp(val) - elseif Sys.isapple() - return AppleAccelerate.exp(val) - else + #if Sys.iswindows() || Sys.islinux() + # return IVM.exp(val) + #elseif Sys.isapple() +# return AppleAccelerate.exp(val) + #else return exp.(val) - end + #end end diff --git a/src/main.jl b/src/main.jl index 2adf9aa..2dffdf2 100755 --- a/src/main.jl +++ b/src/main.jl @@ -67,7 +67,7 @@ function amica!(myAmica::AbstractAmica, # pre-calculate abs(y)^rho for j in 1:m for i in 1:n - y_rho[i,:,j] .= optimized_pow(abs.(myAmica.y[i,:,j]), myAmica.learnedParameters.shape[j,i]) + @views y_rho[i,:,j] .= (abs.(myAmica.y[i,:,j]) .^myAmica.learnedParameters.shape[j,i]) end end From abab784638411e13eb9be794d46b951485e2e47b Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Thu, 21 Dec 2023 10:52:15 +0000 Subject: [PATCH 09/11] 1.46s/it 
vs. 0.6s/it --- Project.toml | 1 + src/Amica.jl | 8 +++-- src/helper.jl | 19 ++++++++++ src/likelihood.jl | 57 +++++++++++++++++------------- src/main.jl | 13 +++---- src/parameters.jl | 88 +++++++++++++++++++++++++++-------------------- src/types.jl | 58 ++++++++++++++++--------------- 7 files changed, 146 insertions(+), 98 deletions(-) diff --git a/Project.toml b/Project.toml index 162c2a5..999796d 100755 --- a/Project.toml +++ b/Project.toml @@ -19,6 +19,7 @@ Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a" ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca" SignalAnalysis = "df1fea92-c066-49dd-8b36-eace3378ea47" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" +StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" [compat] ComponentArrays = "0.14" diff --git a/src/Amica.jl b/src/Amica.jl index 39d5ec8..2d062e3 100755 --- a/src/Amica.jl +++ b/src/Amica.jl @@ -8,7 +8,7 @@ module Amica using ProgressMeter using LoopVectorization using AppleAccelerate - + using StaticArrays #using ComponentArrays using Diagonalizations using LogExpFunctions @@ -30,12 +30,14 @@ module Amica function Base.show(io::Core.IO,m::SingleModelAmica) try - global like = m.LL[findlast(m.LL .!= 0)] + ix = findlast(.!isnan.(m.LL)) + @show ix + global like = string(m.LL[ix]) * " (after $(string(ix)) iterations)" catch global like = "not run" end println(io,""" - Amica with: + $(typeof(m)) with: - signal-size: $(size(m.source_signals)) - likelihood: $(like) """) diff --git a/src/helper.jl b/src/helper.jl index 16e3020..8bc90eb 100755 --- a/src/helper.jl +++ b/src/helper.jl @@ -44,6 +44,16 @@ end return @inbounds copysign.(optimized_pow(abs.(x), rho - 1), x) .* rho end +function ffun!(fp,x,rho) + #f = rho * sign(x).*abs(x).^(rho-1); + #rho .* (IVM.pow(x, rho - 1), x) + for k = eachindex(x) + fp[k] = sign(x[k]) * rho * abs(x[k])^(rho-1) + end +# fp .= sign.(x).*rho .* abs.(x).^(rho-1) +end + + # optimized power function for different cpu architectures function optimized_pow(lhs::AbstractArray{T, 1}, rhs::T) where {T<:Real} optimized_pow(lhs, repeat([rhs], length(lhs))) @@ -70,6 +80,15 @@ function optimized_log(val) end +function optimized_exp!(val) + if Sys.iswindows() || Sys.islinux() + IVM.exp!(val) + elseif Sys.isapple() + return AppleAccelerate.exp!(val) + else + val .= exp.(val) + end +end function optimized_exp(val) #if Sys.iswindows() || Sys.islinux() # return IVM.exp(val) diff --git a/src/likelihood.jl b/src/likelihood.jl index e6b6e31..8ddeded 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -24,12 +24,13 @@ function calculate_LL!(myAmica::MultiModelAmica) end #Update loop for Lt and u (which is saved in z). 
Todo: Rename -function loopiloop!(myAmica::SingleModelAmica{T}, y_rho) where {T} - (ncomponents,N) = size(myAmica.source_signals) - Q = Array{T}(undef,myAmica.m,N) +function loopiloop!(myAmica::SingleModelAmica{T,ncomps,nmix}, y_rho) where {T,ncomps,nmix} + N = size(myAmica.source_signals,2) + n = size(myAmica.source_signals,1) + Q = Array{T}(undef,nmix,N) gg = myAmica.learnedParameters - for comps in 1:ncomponents - @views calculate_Q!(Q,gg.proportions[:,comps],gg.scale[:,comps],gg.shape[:,comps],y_rho[comps,:,:]) + for comps in 1:n + calculate_Q!(Q,gg.proportions[:,comps],gg.scale[:,comps],gg.shape[:,comps],@view(y_rho[comps,:,:])) #Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q calculate_u!(myAmica,Q,comps) # myAmica.z calculate_Lt!(myAmica,Q) # myAmica.Q @@ -57,7 +58,8 @@ function calculate_Q(myAmica::SingleModelAmica, i, y_rho) @views calculate_Q!(Q,gg.proportions[:,i],gg.scale[:,i],gg.shape[:,i],y_rho[i,:,:]) return Q end - function calculate_Q!(Q,proportions,scale,shape,y_rho) + +function calculate_Q!(Q,proportions,scale,shape,y_rho) for j in 1:length(proportions) Q[j,:] .= log(proportions[j]) + 0.5 * log(scale[j]) .+ logpfun(shape[j], y_rho[ :, j]) end @@ -65,32 +67,39 @@ end return Q end -function calculate_Q(myAmica::SingleModelAmica, i, y_rho) - (n,N) = size(myAmica.source_signals) - m = myAmica.m - Q = zeros(m,N) - - for k in 1:n - for j in 1:m - Q[j,k] = log(myAmica.learnedParameters.proportions[j,i]) + - 0.5 * log(myAmica.learnedParameters.scale[j,i]) + - logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, k, j]) - end - end - - return Q -end +#function calculate_Q(myAmica::SingleModelAmica, i, y_rho) +# (n,N) = size(myAmica.source_signals) +# m = myAmica.m +# Q = zeros(m,N) +# +# for k in 1:n +# for j in 1:m +# Q[j,k] = log(myAmica.learnedParameters.proportions[j,i]) + +# 0.5 * log(myAmica.learnedParameters.scale[j,i]) + +# logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, k, j]) +# end +# end +# +# return Q +#end #calculates u but saves it into z. MultiModel also uses the SingleModel version - function calculate_u!(myAmica::SingleModelAmica, Q, i) + function calculate_u!(myAmica::SingleModelAmica{T,ncomps,nmix}, Q, i) where {T,ncomps,nmix} m = size(myAmica.learnedParameters.scale,1) #scratch_Q = similar(@view(Q[1,:])) #scratch_Q = similar(Q) + tmp = Array{T}(undef,size(Q,1)-1,size(Q,2)) if m > 1 # i is component/channel - for j in 1:m # 1:3 m mixtures + ixes = 1:m + for j in ixes # 1:3 m mixtures # I think it is very hard to optimize the Q .- Q[j,:] inner term - sum!( @view(myAmica.z[i,:,j]),exp.(Q .- @view(Q[j, :])')') + ix = (ixes)[ixes .∈ 2] + tmp .= @view(Q[ix,:]) .- @view(Q[j, :])' + #tmp .= exp.(tmp) + #IVM.exp!(tmp) + optimized_exp!(tmp) + sum!( @view(myAmica.z[i,:,j])',tmp) end end diff --git a/src/main.jl b/src/main.jl index 2dffdf2..9ebd91c 100755 --- a/src/main.jl +++ b/src/main.jl @@ -13,9 +13,9 @@ function fit!(amica::AbstractAmica, data; kwargs...) end function amica!(myAmica::AbstractAmica, - data; - lrate = LearningRate(), - shapelrate = LearningRate(;lrate = 0.1,minimum=0.5,maximum=5,init=1.5), + data::AbstractMatrix{T}; + lrate = LearningRate{T}(), + shapelrate = LearningRate{T}(;lrate = 0.1,minimum=0.5,maximum=5,init=1.5), remove_mean = true, do_sphering = true, show_progress = true, @@ -26,9 +26,9 @@ function amica!(myAmica::AbstractAmica, update_shape = 1, mindll = 1e-8, - kwargs...) + kwargs...) 
where {T} - initialize_shape_parameter(myAmica,shapelrate) + initialize_shape_parameter!(myAmica,shapelrate) (n, N) = size(data) m = myAmica.m @@ -54,10 +54,11 @@ function amica!(myAmica::AbstractAmica, lambda = zeros(n, 1) prog = ProgressUnknown("Minimizing"; showspeed=true) - y_rho = zeros(size(myAmica.y)) + y_rho = similar(myAmica.y) for iter in 1:maxiter #E-step + #@show typeof(myAmica.A),typeof(data) update_sources!(myAmica, data) calculate_ldet!(myAmica) initialize_Lt!(myAmica) diff --git a/src/parameters.jl b/src/parameters.jl index ce89883..ab01851 100644 --- a/src/parameters.jl +++ b/src/parameters.jl @@ -4,15 +4,16 @@ function reparameterize!(myAmica::SingleModelAmica, data) mu = myAmica.learnedParameters.location beta = myAmica.learnedParameters.scale - for i in 1:n - tau = norm(myAmica.A[:,i]) - myAmica.A[:,i] ./= tau - mu[:,i] .*= tau - beta[:,i] ./= tau^2 - end + tau = norm.(eachcol(myAmica.A)) + #for i in 1:n + # tau = norm(myAmica.A[:,i]) + myAmica.A == myAmica.A ./ tau + myAmica.learnedParameters.location = myAmica.learnedParameters.location .* tau' + myAmica.learnedParameters.scale = myAmica.learnedParameters.scale ./tau'.^2 + - myAmica.learnedParameters.location .= mu - myAmica.learnedParameters.scale .= beta + #myAmica.learnedParameters.location .= mu + #myAmica.learnedParameters.scale .= beta return myAmica end @@ -73,14 +74,15 @@ function calculate_z!(myAmica::MultiModelAmica, i,j,h) end #Updates the Gaussian mixture proportion parameter -function update_mixture_proportions!(sumz, myAmica::SingleModelAmica,j,i) +function update_mixture_proportions!(myAmica::SingleModelAmica,sumz) N = size(myAmica.source_signals,2) if myAmica.m > 1 - myAmica.learnedParameters.proportions[j,i] = sumz / N + myAmica.learnedParameters.proportions = sumz ./ N end end function update_mixture_proportions!(sumz, myAmica::MultiModelAmica,j,i,h) + error("outdated") if myAmica.m > 1 myAmica.models[h].learnedParameters.proportions[j,i] = sumz / myAmica.ica_weights[h] end @@ -133,15 +135,12 @@ function update_scale(zfp,y,scale,z,shape) end #Sets the initial value for the shape parameter of the GeneralizedGaussians for each Model -function initialize_shape_parameter(myAmica::SingleModelAmica, shapelrate::LearningRate) - myAmica.learnedParameters.shape .= shapelrate.init .*myAmica.learnedParameters.shape +function initialize_shape_parameter!(myAmica::SingleModelAmica, shapelrate::LearningRate) + myAmica.learnedParameters.shape = shapelrate.init .*myAmica.learnedParameters.shape end -function initialize_shape_parameter(myAmica::MultiModelAmica, shapelrate::LearningRate) - M = size(myAmica.models,1) - for h in 1:M - myAmica.models[h].learnedParameters.shape .= shapelrate.init .*myAmica.models[h].learnedParameters.shape - end +function initialize_shape_parameter!(myAmica::MultiModelAmica, shapelrate::LearningRate) + initialize_shape_parameter!.(myAmica.models) end #Updates Gaussian mixture Parameters and mixing matrix. todo: rename since its not a loop for single model @@ -191,7 +190,7 @@ end #Updates Gaussian mixture parameters. It also returns g, kappa and lamda which are needed to apply the newton method. 
#Todo: Save g, kappa, lambda in structure, remove return -@views function update_parameters!(myAmica::SingleModelAmica, lambda, y_rho, lrate_rho::LearningRate, upd_shape) +@views function update_parameters!(myAmica::SingleModelAmica{T,ncomps,nmix}, lambda, y_rho, lrate_rho::LearningRate, upd_shape) where {T,ncomps,nmix} alpha = myAmica.learnedParameters.proportions beta = myAmica.learnedParameters.scale mu = myAmica.learnedParameters.location @@ -216,11 +215,9 @@ end sumz[sumz .< 0] .= 1 myAmica.z ./= sumz end - for i in 1:n - for j in 1:m - update_mixture_proportions!(sumz[i, 1, j],myAmica,j,i) - end - end + + update_mixture_proportions!(myAmica,sumz[:, 1, :]) + # update # - fp @@ -238,42 +235,55 @@ end # - alpha # - beta # - mu + kp = similar(rho) + fp = Vector{T}(undef,N) for i in 1:n for j in 1:m - fp = ffun(myAmica.y[i,:,j], rho[j,i]) + #@show size(fp), size(myAmica.y) + ffun!(fp,myAmica.y[i,:,j], rho[j,i]) zfp[j,:] .= myAmica.z[i,:,j] .* fp g[i,:] .+= alpha[j,i] .* sqrt(beta[j,i]) .*zfp[j,:] + + kp[j,i] = beta[j,i] .* sum(zfp[j,:].*fp) - kp = beta[j,i] .* sum(zfp[j,:].*fp) + kappa[i] += alpha[j,i] * kp[j,i] - kappa[i] += alpha[j,i] * kp + lambda[i] += alpha[j,i] * (sum(myAmica.z[i,:,j].*(fp.*myAmica.y[i,:,j] .-1).^2) + mu[j,i]^2 * kp[j,i]) + end + end - lambda[i] += alpha[j,i] * (sum(myAmica.z[i,:,j].*(fp.*myAmica.y[i,:,j] .-1).^2) + mu[j,i]^2 * kp) - mu[j,i] = update_location(myAmica,rho[j,i],zfp[j,:],myAmica.y[i,:,j],mu[j,i],beta[j,i],kp) - - - beta[j,i] = update_scale(zfp[j,:],myAmica.y[i,:,j],beta[j,i],myAmica.z[i,:,j],rho[j,i]) + #mu = update_location(myAmica,rho,zfp,mu,beta,kp) + updated_mu = Array(similar(rho)) # convert from StaticMatrix to Mutable + updated_beta = Array(similar(rho)) + + for i in 1:n + for j in 1:m + updated_mu[j,i] = update_location(myAmica,rho[j,i],zfp[j,:],myAmica.y[i,:,j],mu[j,i],beta[j,i],kp[j,i]) + updated_beta[j,i] = update_scale(zfp[j,:],myAmica.y[i,:,j],beta[j,i],myAmica.z[i,:,j],rho[j,i]) end end + + + # update rho # depends on rho, zfp, myAmica.y, mu, beta if upd_shape == 1 dr = sum(myAmica.z .* optimized_log(y_rho) .* y_rho, dims=2) - + updated_rho = Array(similar(rho)) for i in 1:n for j in 1:m - rho[j, i] = update_shape(rho[j, i], dr[i, 1, j], lrate_rho) + updated_rho[j, i] = update_shape(rho[j, i], dr[i, 1, j], lrate_rho) end end end myAmica.learnedParameters.proportions = alpha - myAmica.learnedParameters.scale = beta - myAmica.learnedParameters.location = mu - myAmica.learnedParameters.shape = rho + myAmica.learnedParameters.scale = updated_beta + myAmica.learnedParameters.location = updated_mu + myAmica.learnedParameters.shape = updated_rho return g, kappa end @@ -371,7 +381,11 @@ end #Updates source singal estimations by unmixing the data function update_sources!(myAmica::SingleModelAmica, data) - myAmica.source_signals = pinv(myAmica.A) * data + #myAmica.source_signals .= myAmica.A \ data# + #myAmica.source_signals .= myAmica.A \ data# + #ldiv!(myAmica.source_signals,myAmica.A,data) + + myAmica.source_signals .= pinv(myAmica.A) * data end function update_sources!(myAmica::MultiModelAmica, data) diff --git a/src/types.jl b/src/types.jl index be17ff5..e1cbd8a 100755 --- a/src/types.jl +++ b/src/types.jl @@ -1,18 +1,18 @@ -mutable struct GGParameters{T} - proportions::Array{T, 2} #source density mixture proportions - scale::Array{T, 2} #source density inverse scale parameter - location::Array{T, 2} #source density location parameter - shape::Array{T, 2} #source density shape paramters +mutable struct GGParameters{T,ncomps,nmix} + 
proportions::SMatrix{nmix,ncomps,T} #source density mixture proportions + scale::SMatrix{nmix,ncomps,T} #source density inverse scale parameter + location::SMatrix{nmix,ncomps,T} #source density location parameter + shape::SMatrix{nmix,ncomps,T} #source density shape paramters end abstract type AbstractAmica end -mutable struct SingleModelAmica{T} <:AbstractAmica +mutable struct SingleModelAmica{T,ncomps,nmix} <:AbstractAmica source_signals::Array{T,2} - learnedParameters::GGParameters{T} + learnedParameters::GGParameters{T,ncomps,nmix} m::Int #Number of gaussians - A::Array{T,2} # unmixing matrices for each model + A::SMatrix{ncomps,ncomps,T} # unmixing matrices for each model S::Array{T,2} # sphering matrix z::Array{T,3} y::Array{T,3} @@ -37,59 +37,61 @@ end #Structure for Learning Rate type with initial value, minumum, maximum etc. Used for learning rate and shape lrate using Parameters -@with_kw mutable struct LearningRate - lrate::Real = 0.1 - init::Float64 = 0.1 - minimum::Float64 = 0. - maximum::Float64 = 1.0 - natural_rate::Float64 = 0.1 - decreaseFactor::Float64 = 0.5 +@with_kw mutable struct LearningRate{T} + lrate::T = 0.1 + init::T = 0.1 + minimum::T = 0. + maximum::T = 1.0 + natural_rate::T = 0.1 + decreaseFactor::T = 0.5 end #Data type for AMICA with just one ICA model. todo: rename gg parameters function SingleModelAmica(data::AbstractArray{T}; m=3, maxiter=500, A=nothing, location=nothing, scale=nothing, kwargs...) where {T<:Real} (n, N) = size(data) + ncomps = n + nmix = m #initialize parameters - centers = zeros(n) + centers = zeros(T,n) eye = Matrix(I, n, n) if isnothing(A) - A = zeros(n,n) + A = zeros(T,n,n) A[:,:] = eye[n] .+ 0.1*rand(n,n) for i in 1:n A[:,i] = A[:,i] / norm(A[:,i]) end end - proportions = (1/m) * ones(m,n) + proportions = (1/m) * ones(T,m,n) if isnothing(location) if m > 1 - location = 0.1 * randn(m, n) + location = 0.1 * randn(T,m, n) else - location = zeros(m, n) + location = zeros(T,m, n) end end if isnothing(scale) - scale = ones(m, n) + 0.1 * randn(m, n) + scale = ones(T,m, n) + 0.1 * randn(T,m, n) end - shape = ones(m, n) + shape = ones(T,m, n) - y = zeros(n,N,m) + y = zeros(T,n,N,m) - Lt = zeros(N) - z = ones(n,N,m)/N + Lt = zeros(T,N) + z = ones(T,n,N,m)/N #Sets some parameters to nothing if used my MultiModel to only have them once if isnothing(maxiter) LL = nothing m = nothing else - LL = Float64[] + LL = T[] end ldet = 0.0 - source_signals = zeros(n,N) + source_signals = zeros(T,n,N) - return SingleModelAmica{T}(source_signals,GGParameters{T}(proportions,scale,location,shape),m,A,I(size(A,1)), z,y,#=Q,=#centers,Lt,LL,ldet,maxiter) + return SingleModelAmica{T,ncomps,nmix}(source_signals,GGParameters{T,ncomps,nmix}(proportions,scale,location,shape),m,A,I(size(A,1)), z,y,#=Q,=#centers,Lt,LL,ldet,maxiter) end #Data type for AMICA with multiple ICA models From 0747103602f312b1b5b79e655dfe3a5675698537 Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Thu, 21 Dec 2023 16:25:53 +0000 Subject: [PATCH 10/11] finally works again.... 
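
Part of the fix: calculate_Lt! goes back to LogExpFunctions.logsumexp instead of the hand-rolled max-shift; logsumexp applies the same stabilization internally (log sum_j exp(Q_j) = Qmax + log sum_j exp(Q_j - Qmax), so the exp cannot overflow). A minimal sketch of the accumulation, assuming only an m x N matrix of per-mixture log-densities (the function name is illustrative):

using LogExpFunctions

function accumulate_Lt!(Lt::AbstractVector, Q::AbstractMatrix)
    # Q[j, t]: log-density of mixture j at sample t; Lt[t]: running log-likelihood
    Lt .+= vec(logsumexp(Q; dims=1))  # stable log(sum_j exp(Q[j, t])) for each column t
    return Lt
end

LogExpFunctions is already loaded in src/Amica.jl, so this costs no extra dependency.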
--- src/likelihood.jl | 39 ++++++++++---------- src/main.jl | 20 +++++++++-- src/parameters.jl | 92 +++++++++++++++++++++++------------------------ 3 files changed, 83 insertions(+), 68 deletions(-) diff --git a/src/likelihood.jl b/src/likelihood.jl index 8ddeded..38b5296 100755 --- a/src/likelihood.jl +++ b/src/likelihood.jl @@ -31,10 +31,13 @@ function loopiloop!(myAmica::SingleModelAmica{T,ncomps,nmix}, y_rho) where {T,nc gg = myAmica.learnedParameters for comps in 1:n calculate_Q!(Q,gg.proportions[:,comps],gg.scale[:,comps],gg.shape[:,comps],@view(y_rho[comps,:,:])) + @debug :Q, Q[1,1],gg.proportions[1,comps],gg.scale[1,comps],gg.shape[1,comps],myAmica.y[comps,1,1] #,@view(y_rho[comps,:,:])) #Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q calculate_u!(myAmica,Q,comps) # myAmica.z + @debug "z",myAmica.z[comps,1,1] calculate_Lt!(myAmica,Q) # myAmica.Q end + @debug "lt",myAmica.Lt[[1,end]] end function loopiloop!(myAmica::MultiModelAmica, y_rho) @@ -88,14 +91,14 @@ end m = size(myAmica.learnedParameters.scale,1) #scratch_Q = similar(@view(Q[1,:])) #scratch_Q = similar(Q) - tmp = Array{T}(undef,size(Q,1)-1,size(Q,2)) + tmp = Array{T}(undef,size(Q,1),size(Q,2)) if m > 1 # i is component/channel ixes = 1:m for j in ixes # 1:3 m mixtures # I think it is very hard to optimize the Q .- Q[j,:] inner term - ix = (ixes)[ixes .∈ 2] - tmp .= @view(Q[ix,:]) .- @view(Q[j, :])' +# ix = (ixes)[ixes .∈ 2] + tmp .= @view(Q[:,:]) .- @view(Q[j, :])' #tmp .= exp.(tmp) #IVM.exp!(tmp) optimized_exp!(tmp) @@ -108,16 +111,21 @@ end #Applies location and scale parameter to source signals (per generalized Gaussian) @views function calculate_y!(myAmica::SingleModelAmica) - #for j in 1:myAmica.m - # myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals .- myAmica.learnedParameters.location[j,:]) - #end - for j in 1:myAmica.m - for k = 1: size(myAmica.m,1) - for i = 1:size(myAmica.m,2) - myAmica.y[k,i,j] = sqrt(myAmica.learnedParameters.scale[j,k]) * (myAmica.source_signals[k,i] - myAmica.learnedParameters.location[j,k]) - end + + + for j in 1:myAmica.m + myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals .- myAmica.learnedParameters.location[j,:]) end - end +# @debug myAmica.y[1,1,1:3],myAmica.learnedParameters.scale[1:3,1], myAmica.learnedParameters.location[1:3,1] +# for j in 1:myAmica.m +# for i = 1:size(myAmica.source_signals,2) +# for k = 1: size(myAmica.source_signals,1) +# myAmica.y[k,i,j] = sqrt(myAmica.learnedParameters.scale[j,k]) * (myAmica.source_signals[k,i] - myAmica.learnedParameters.location[j,k]) +# end +# end + #end + + end function calculate_y!(myAmica::MultiModelAmica) @@ -128,12 +136,7 @@ end function calculate_Lt!(myAmica::SingleModelAmica,Q) m = size(myAmica.learnedParameters.scale,1) if m > 1 - Qmax = maximum(Q,dims=1); - #@show size(Q), size(Qmax) - Q .= Q .- Qmax - logsumexp!(@view(Qmax[1,:]),Q') - myAmica.Lt .= myAmica.Lt .+ @view(Qmax[1,:]) - + myAmica.Lt .+= logsumexp(Q;dims=1)[1,:] else myAmica.Lt[:] = myAmica.Lt .+ Q[1,:] #todo: test end diff --git a/src/main.jl b/src/main.jl index 9ebd91c..4b83f7f 100755 --- a/src/main.jl +++ b/src/main.jl @@ -57,14 +57,24 @@ function amica!(myAmica::AbstractAmica, y_rho = similar(myAmica.y) for iter in 1:maxiter + gg = myAmica.learnedParameters + @debug :scale,gg.scale[2],:location,gg.location[2],:proportions,gg.proportions[2],:shape,gg.shape[2] + @debug println("") #E-step #@show typeof(myAmica.A),typeof(data) + @debug :A myAmica.A[1:2,1:2] + + @debug :source_signals 
myAmica.source_signals[1],myAmica.source_signals[end] update_sources!(myAmica, data) + @debug :source_signals myAmica.source_signals[1],myAmica.source_signals[end] calculate_ldet!(myAmica) initialize_Lt!(myAmica) myAmica.Lt .+= LLdetS - calculate_y!(myAmica) + + + calculate_y!(myAmica) + @debug :y, myAmica.y[1,1:3,1] # pre-calculate abs(y)^rho for j in 1:m for i in 1:n @@ -75,6 +85,8 @@ function amica!(myAmica::AbstractAmica, loopiloop!(myAmica, y_rho) #Updates y and Lt. Todo: Rename + + calculate_LL!(myAmica) @@ -107,8 +119,11 @@ function amica!(myAmica::AbstractAmica, rethrow() end end - + @debug iter, :A myAmica.A[1:2,1:2] + reparameterize!(myAmica, data) + + @debug iter, :A myAmica.A[1:2,1:2] #Shows current progress show_progress && ProgressMeter.next!(prog; showvalues=[(:LL, myAmica.LL[iter])]) @@ -121,5 +136,6 @@ function amica!(myAmica::AbstractAmica, if remove_mean add_means_back!(myAmica, removed_mean) end + @debug myAmica.LL return myAmica end \ No newline at end of file diff --git a/src/parameters.jl b/src/parameters.jl index ab01851..b61b159 100644 --- a/src/parameters.jl +++ b/src/parameters.jl @@ -1,20 +1,13 @@ #Normalizes source density location parameter (mu), scale parameter (beta) and model centers -function reparameterize!(myAmica::SingleModelAmica, data) - (n,N) = size(myAmica.source_signals) - mu = myAmica.learnedParameters.location - beta = myAmica.learnedParameters.scale +function reparameterize!(myAmica::SingleModelAmica, data) tau = norm.(eachcol(myAmica.A)) - #for i in 1:n - # tau = norm(myAmica.A[:,i]) - myAmica.A == myAmica.A ./ tau - myAmica.learnedParameters.location = myAmica.learnedParameters.location .* tau' - myAmica.learnedParameters.scale = myAmica.learnedParameters.scale ./tau'.^2 + @debug size(tau) tau + myAmica.A = myAmica.A ./ tau' + myAmica.learnedParameters.location = myAmica.learnedParameters.location .* tau' + myAmica.learnedParameters.scale = myAmica.learnedParameters.scale ./tau'.^2 - #myAmica.learnedParameters.location .= mu - #myAmica.learnedParameters.scale .= beta - return myAmica end @@ -77,7 +70,8 @@ end function update_mixture_proportions!(myAmica::SingleModelAmica,sumz) N = size(myAmica.source_signals,2) if myAmica.m > 1 - myAmica.learnedParameters.proportions = sumz ./ N + + myAmica.learnedParameters.proportions = sumz' ./ N end end @@ -124,6 +118,7 @@ end function update_scale(zfp,y,scale,z,shape) if shape <= 2 db = sum(zfp.*y) + #@show db if db > 0 scale = scale ./ db end @@ -147,7 +142,7 @@ end function update_loop!(myAmica::SingleModelAmica, lambda, y_rho, shapelrate, update_shape, iter, do_newton, newt_start_iter, lrate) #Update parameters g, kappa = update_parameters!(myAmica, lambda, y_rho, shapelrate, update_shape) - + @debug :g, g[1],:kappa kappa[1] #Checks for NaN in parameters before updating the mixing matrix if any(isnan, kappa) || any(isnan, myAmica.source_signals) || any(isnan, lambda) || any(isnan, g) || any(isnan, myAmica.learnedParameters.proportions) throw(AmicaNaNException()) @@ -191,15 +186,16 @@ end #Updates Gaussian mixture parameters. It also returns g, kappa and lamda which are needed to apply the newton method. 
#Todo: Save g, kappa, lambda in structure, remove return @views function update_parameters!(myAmica::SingleModelAmica{T,ncomps,nmix}, lambda, y_rho, lrate_rho::LearningRate, upd_shape) where {T,ncomps,nmix} - alpha = myAmica.learnedParameters.proportions - beta = myAmica.learnedParameters.scale - mu = myAmica.learnedParameters.location - rho = myAmica.learnedParameters.shape + gg = myAmica.learnedParameters + #alpha = myAmica.learnedParameters.proportions + #beta = myAmica.learnedParameters.scale + #mu = myAmica.learnedParameters.location + #rho = myAmica.learnedParameters.shape (n,N) = size(myAmica.source_signals) m = myAmica.m - g = zeros(n,N) - kappa = zeros(n,1) - zfp = zeros(m, N) + g = zeros(T,n,N) + kappa = zeros(T,n,1) + zfp = zeros(T,m, N) # update # - myAmica.learnedParameters.proportions @@ -215,7 +211,7 @@ end sumz[sumz .< 0] .= 1 myAmica.z ./= sumz end - + update_mixture_proportions!(myAmica,sumz[:, 1, :]) @@ -235,55 +231,55 @@ end # - alpha # - beta # - mu - kp = similar(rho) + kp = similar(gg.shape) + + updated_location = Array(similar(gg.shape)) # convert from StaticMatrix to Mutable + updated_scale = Array(similar(gg.shape)) + fp = Vector{T}(undef,N) for i in 1:n for j in 1:m #@show size(fp), size(myAmica.y) - ffun!(fp,myAmica.y[i,:,j], rho[j,i]) + ffun!(fp,myAmica.y[i,:,j], gg.shape[j,i]) zfp[j,:] .= myAmica.z[i,:,j] .* fp - g[i,:] .+= alpha[j,i] .* sqrt(beta[j,i]) .*zfp[j,:] + g[i,:] .+= gg.proportions[j,i] .* sqrt(gg.scale[j,i]) .*zfp[j,:] - kp[j,i] = beta[j,i] .* sum(zfp[j,:].*fp) - - kappa[i] += alpha[j,i] * kp[j,i] - - lambda[i] += alpha[j,i] * (sum(myAmica.z[i,:,j].*(fp.*myAmica.y[i,:,j] .-1).^2) + mu[j,i]^2 * kp[j,i]) + + kp[j,i] = gg.scale[j,i] .* sum(zfp[j,:].*fp) + + kappa[i] += gg.proportions[j,i] * kp[j,i] + + lambda[i] += gg.proportions[j,i] * (sum(myAmica.z[i,:,j].*(fp.*myAmica.y[i,:,j] .-1).^2) + gg.location[j,i]^2 * kp[j,i]) + @debug fp[1],zfp[j,1],g[i,1],gg.proportions[j,i],gg.scale[j,i], kp[j,i],kappa[i] + updated_location[j,i] = update_location(myAmica,gg.shape[j,i],zfp[j,:],myAmica.y[i,:,j],gg.location[j,i],gg.scale[j,i],kp[j,i]) + + @debug :zfp,zfp[j,1],:y,myAmica.y[i,1,j],:z,myAmica.z[i,1,j] + + updated_scale[j,i] = update_scale(zfp[j,:],myAmica.y[i,:,j],gg.scale[j,i],myAmica.z[i,:,j],gg.shape[j,i]) end end #mu = update_location(myAmica,rho,zfp,mu,beta,kp) - updated_mu = Array(similar(rho)) # convert from StaticMatrix to Mutable - updated_beta = Array(similar(rho)) - - for i in 1:n - for j in 1:m - updated_mu[j,i] = update_location(myAmica,rho[j,i],zfp[j,:],myAmica.y[i,:,j],mu[j,i],beta[j,i],kp[j,i]) - updated_beta[j,i] = update_scale(zfp[j,:],myAmica.y[i,:,j],beta[j,i],myAmica.z[i,:,j],rho[j,i]) - end - end - - # update rho # depends on rho, zfp, myAmica.y, mu, beta if upd_shape == 1 dr = sum(myAmica.z .* optimized_log(y_rho) .* y_rho, dims=2) - updated_rho = Array(similar(rho)) + updated_shape = Array(similar(gg.shape)) for i in 1:n for j in 1:m - updated_rho[j, i] = update_shape(rho[j, i], dr[i, 1, j], lrate_rho) + updated_shape[j, i] = update_shape(gg.shape[j, i], dr[i, 1, j], lrate_rho) end end end - myAmica.learnedParameters.proportions = alpha - myAmica.learnedParameters.scale = updated_beta - myAmica.learnedParameters.location = updated_mu - myAmica.learnedParameters.shape = updated_rho + #myAmica.learnedParameters.proportions = alpha + myAmica.learnedParameters.scale = updated_scale + myAmica.learnedParameters.location = updated_location + myAmica.learnedParameters.shape = updated_shape return g, kappa end @@ -385,7 +381,7 @@ function 
update_sources!(myAmica::SingleModelAmica, data) #myAmica.source_signals .= myAmica.A \ data# #ldiv!(myAmica.source_signals,myAmica.A,data) - myAmica.source_signals .= pinv(myAmica.A) * data + myAmica.source_signals = pinv(myAmica.A) * data end function update_sources!(myAmica::MultiModelAmica, data) From 884b96af3c7f8bebb92205d360b6b3beb97279b1 Mon Sep 17 00:00:00 2001 From: "behinger (s-ccs 001)" Date: Thu, 18 Jan 2024 11:13:06 +0000 Subject: [PATCH 11/11] unsure --- test/compare_amica_implementations.jl | 58 +++++++++++++++++++-------- 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/test/compare_amica_implementations.jl b/test/compare_amica_implementations.jl index 5f4c468..b1d8992 100644 --- a/test/compare_amica_implementations.jl +++ b/test/compare_amica_implementations.jl @@ -8,8 +8,8 @@ using CairoMakie #--- includet("../../src/simulate_data.jl") includet("../../src/fortran_tools.jl") -n_chan=3 -n_time=10000 +n_chan=10 +n_time=500_000 x,A,s = simulate_data(;T=50,n_chan,n_time,type=:gg6) i_scale,i_location,i_A = init_params(;n_chan,n_time) @@ -22,36 +22,41 @@ f #--- -maxiter = 200 +maxiter = 50 + # matlab run mat""" tic [mA,mc,mLL,mLt,mgm,malpha,mmu,mbeta,mrho] = amica_a($x,1,3,$maxiter,$i_location,$i_scale,$i_A,0); -toc +mt = toc """ mat""" tic [mAopt,mWopt,mSopt,mkhindsopt,mcopt,mLLopt,mLtopt,mgmopt,malphaopt,mmuopt,mbetaopt,mrhoopt] = amica_optimized($x,1,3,$maxiter,1,1,$i_location,$i_scale,$i_A); -toc +mtopt = toc + """ mA = @mget(mA); # get matlab var mAopt = @mget(mAopt); # get matlab opt var +t_mopt = @mget(mtopt); # get matlab var +t_m = @mget(mt); # get matlab var + # Fortran setup + ran fortran_setup(Float32.(x);max_threads=1,max_iter=maxiter) -@time run(`/scratch/projects/fapra_amica/fortran/amica15test julia.param`) +t_f32 = @elapsed run(`/scratch/projects/fapra_amica/fortran/amica15test julia.param`) fA = reshape(reinterpret(Float64,(read("amicaout/A"))),n_chan,n_chan) fortran_setup(Float64.(x);max_threads=1,max_iter=maxiter,dble_data=1) -@time run(`/scratch/projects/fapra_amica/fortran/amica15test julia.param`) +t_f64 = @elapsed run(`/scratch/projects/fapra_amica/fortran/amica15test julia.param`) -#--- Julia run -am32 = SingleModelAmica(Float32.(x);maxiter=maxiter,A=deepcopy(i_A),location=i_location,scale=i_scale) -@time fit!(am32,Float32.(x)) +# Julia run +am32 = SingleModelAmica(Float32.(x);maxiter=maxiter,A=deepcopy(i_A),location=deepcopy(i_location),scale=deepcopy(i_scale)) +t_am32 = @elapsed fit!(am32,Float32.(x)) -am64 = SingleModelAmica(Float64.(x);maxiter=maxiter,A=deepcopy(i_A),location=i_location,scale=i_scale) -@time fit!(am64,Float64.(x)) +am64 = SingleModelAmica(Float64.(x);maxiter=maxiter,A=deepcopy(i_A),location=deepcopy(i_location),scale=deepcopy(i_scale)) +t_am64 = @elapsed fit!(am64,Float64.(x)) #am = SingleModelAmica(Float16.(x);maxiter=maxiter,A=i_A,location=i_location,scale=i_scale) #@time fit!(am,x) #vcat(@mget(mLL),am.LL') @@ -62,7 +67,7 @@ series(f2[1,1],inv(am64.A)*x, axis=(;title="unmixed julia64")) series(f2[2,1],inv(am32.A)*x, axis=(;title="unmixed julia32")) series(f2[3,1],inv(mA)*x, axis=(;title="unmixed matlab")) series(f2[4,1],inv(mAopt)*x, axis=(;title="unmixed matlab_optimizd")) -series(f2[5,1],.-inv(fA')*x, axis=(;title="unmixed fortran")) +series(f2[5,1],inv(fA)*x, axis=(;title="unmixed fortran")) series(f2[6,1],s,axis=(;title="original source")) series(f2[7,1],x, axis=(;title="original mixed")) hidedecorations!.(f2.content) @@ -73,17 +78,36 @@ f2 #--- compare LLs + +visibnoise = 0.02 # add little bit of noise to 
distinguish identical lines mLL = @mget mLL mLLopt = @mget mLLopt fLL = reinterpret(Float64,(read("amicaout/LL"))) -f = Figure(size=(1024,1024)) + +f = Figure(size=(1024,512)) ax = f[1,1] = Axis(f) labels = ["julia64","julia32","matlab", "matlab opt","fortran"] for (ix,d) = enumerate([am64.LL, am32.LL,mLL[1,:],mLLopt[1,:],fLL]) - lines!(ax,d;label=labels[ix]) + lines!(ax,d .+ rand(size(d)...).*visibnoise;label=labels[ix]) end axislegend(ax) -ylims!(ax,-2.3,-0.5) +ylims!(ax,-5,0.1) + + +ax_T = f[1,2] = Axis(f) +scatter!(ax_T,[t_am32,t_am64,t_m,t_mopt, t_f32,t_f64]) +ax_T.xticks = 1:6 +ax_T.xtickformat = x->["am32","am64","mat","matopt","f32","f64"][Int.(x)] + f -#--- compare A + +#--- + + +#---- +am32 = SingleModelAmica(Float32.(x);maxiter=10)#,A=deepcopy(i_A),location=deepcopy(i_location),scale=deepcopy(i_scale)) +@profview fit!(am32,Float32.(x)) + +fortran_setup(Float32.(x);max_threads=1,max_iter=5) +t_f32 = @elapsed run(`/scratch/projects/fapra_amica/fortran/amica15test julia.param`) \ No newline at end of file
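
A closing note on the type changes from PATCH 09 onwards: the generalized-Gaussian parameter matrices became StaticArrays SMatrix fields with the mixture and component counts lifted into the type parameters, and the update code now rebinds whole fields (e.g. myAmica.A = myAmica.A ./ tau' in reparameterize!) instead of mutating arrays element-wise. A minimal sketch of that idea, independent of the package's actual structs (the names and the extra length parameter L are illustrative; carrying L keeps the fields concretely typed):

using StaticArrays

# L = nmix*ncomps must travel with the type for the fields to be concrete
struct GGParams{T,nmix,ncomps,L}
    scale::SMatrix{nmix,ncomps,T,L}     # inverse scale per (mixture, component)
    location::SMatrix{nmix,ncomps,T,L}  # location per (mixture, component)
end

# SMatrix values are immutable: "updating" a field means rebuilding the struct,
# which stays cheap and stack-allocated for small fixed sizes.
rescale(p::GGParams{T,m,n}, tau::SVector{n,T}) where {T,m,n} =
    GGParams(p.scale ./ tau' .^ 2, p.location .* tau')

# e.g. rescale(GGParams(@SMatrix(ones(2, 3)), @SMatrix(zeros(2, 3))), SVector(1.0, 2.0, 3.0))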