1 change: 1 addition & 0 deletions Project.toml
@@ -19,6 +19,7 @@ Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
SignalAnalysis = "df1fea92-c066-49dd-8b36-eace3378ea47"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"

[compat]
ComponentArrays = "0.14"
8 changes: 5 additions & 3 deletions src/Amica.jl
@@ -8,7 +8,7 @@ module Amica
using ProgressMeter
using LoopVectorization
using AppleAccelerate

using StaticArrays
#using ComponentArrays
using Diagonalizations
using LogExpFunctions
@@ -30,12 +30,14 @@ module Amica

function Base.show(io::Core.IO,m::SingleModelAmica)
try
global like = m.LL[findlast(m.LL .!= 0)]
ix = findlast(.!isnan.(m.LL))
@show ix
global like = string(m.LL[ix]) * " (after $(string(ix)) iterations)"
catch
global like = "not run"
end
println(io,"""
Amica with:
$(typeof(m)) with:
- signal-size: $(size(m.source_signals))
- likelihood: $(like)
""")
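The new `show` method reports the model type and the last non-NaN log-likelihood together with the iteration that produced it; a small illustration of the indexing it relies on (the values are made up):

    LL = [-3.2, -2.9, NaN, NaN]
    ix = findlast(.!isnan.(LL))                             # 2
    string(LL[ix]) * " (after $(string(ix)) iterations)"    # "-2.9 (after 2 iterations)"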
41 changes: 30 additions & 11 deletions src/helper.jl
@@ -35,7 +35,7 @@ end
#taken from amica_a.m
#L = det(A) * mult p(s|θ)
function logpfun(rho, y_rho)
return @inbounds - y_rho .- log(2) .- loggamma(1 + 1 / rho)
return .- y_rho .- log(2) .- loggamma(1 + 1 / rho)
end
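# logpfun is the log-density of a unit-scale generalized Gaussian with shape rho,
#   p(y) = exp(-|y|^rho) / (2 * gamma(1 + 1/rho)),
# evaluated on the precomputed y_rho = |y|^rho, hence -y_rho - log(2) - loggamma(1 + 1/rho).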


@@ -44,19 +44,29 @@ end
return @inbounds copysign.(optimized_pow(abs.(x), rho - 1), x) .* rho
end

function ffun!(fp,x,rho)
#f = rho * sign(x).*abs(x).^(rho-1);
#rho .* (IVM.pow(x, rho - 1), x)
for k = eachindex(x)
fp[k] = sign(x[k]) * rho * abs(x[k])^(rho-1)
end
# fp .= sign.(x).*rho .* abs.(x).^(rho-1)
end


# optimized power function for different cpu architectures
function optimized_pow(lhs::AbstractArray{T, 1}, rhs::T) where {T<:Real}
optimized_pow(lhs, repeat([rhs], length(lhs)))
end

function optimized_pow(lhs::AbstractArray{T, 1}, rhs::AbstractArray{T, 1}) where {T<:Real}
if Sys.iswindows() || Sys.islinux()
return IVM.pow(lhs, rhs)
elseif Sys.isapple()
return AppleAccelerate.pow(lhs, rhs)
else
# if Sys.iswindows() || Sys.islinux()
# return IVM.pow(lhs, rhs)
# elseif Sys.isapple()
# return AppleAccelerate.pow(lhs, rhs)
# else
return lhs .^ rhs
end
# end
end

function optimized_log(val)
@@ -70,12 +80,21 @@ function optimized_log(val)
end


function optimized_exp(val)
function optimized_exp!(val)
if Sys.iswindows() || Sys.islinux()
return IVM.exp(val)
IVM.exp!(val)
elseif Sys.isapple()
return AppleAccelerate.exp(val)
return AppleAccelerate.exp!(val)
else
return exp.(val)
val .= exp.(val)
end
end
function optimized_exp(val)
#if Sys.iswindows() || Sys.islinux()
# return IVM.exp(val)
#elseif Sys.isapple()
# return AppleAccelerate.exp(val)
#else
return exp.(val)
#end
end
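These helpers route elementwise math through a vendor library per OS (IVM, presumably IntelVectorMath, on Windows/Linux; AppleAccelerate on macOS) with plain broadcasting as the fallback. In this diff the dispatch inside optimized_pow and optimized_exp is commented out so they simply broadcast, while the new in-place optimized_exp! still dispatches and mutates its argument, which lets calculate_u! below reuse one scratch buffer instead of allocating on every pass over the mixtures. A minimal usage sketch (the values are arbitrary):

    buf = abs.(randn(1_000))        # scratch buffer, reused across calls
    optimized_exp!(buf)             # buf now holds exp of its previous contents
    y = optimized_pow(buf, 1.5)     # elementwise buf .^ 1.5 through the same dispatch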
102 changes: 73 additions & 29 deletions src/likelihood.jl
@@ -24,77 +24,121 @@ function calculate_LL!(myAmica::MultiModelAmica)
end

#Update loop for Lt and u (which is saved in z). Todo: Rename
function loopiloop!(myAmica::SingleModelAmica, y_rho)
(n,_) = size(myAmica.source_signals)

for i in 1:n
Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q
calculate_u!(myAmica,Q,i) # myAmica.z
function loopiloop!(myAmica::SingleModelAmica{T,ncomps,nmix}, y_rho) where {T,ncomps,nmix}
N = size(myAmica.source_signals,2)
n = size(myAmica.source_signals,1)
Q = Array{T}(undef,nmix,N)
gg = myAmica.learnedParameters
for comps in 1:n
calculate_Q!(Q,gg.proportions[:,comps],gg.scale[:,comps],gg.shape[:,comps],@view(y_rho[comps,:,:]))
@debug :Q, Q[1,1],gg.proportions[1,comps],gg.scale[1,comps],gg.shape[1,comps],myAmica.y[comps,1,1] #,@view(y_rho[comps,:,:]))
#Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q
calculate_u!(myAmica,Q,comps) # myAmica.z
@debug "z",myAmica.z[comps,1,1]
calculate_Lt!(myAmica,Q) # myAmica.Q
end
@debug "lt",myAmica.Lt[[1,end]]
end

function loopiloop!(myAmica::MultiModelAmica, y_rho)
M = size(myAmica.models,1)
(n,_) = size(myAmica.models[1].source_signals)

for h in 1:M #run along models
Threads.@threads for i in 1:n #run along components
for i in 1:n #run along components
Q = calculate_Q(myAmica.models[h], i, y_rho)
calculate_u!(myAmica.models[h], @view(Q[:, n, :]),i)
calculate_Lt!(myAmica.models[h], @view(Q[:, n, :]))
end
end
end

@views function calculate_Q(myAmica::SingleModelAmica, i, y_rho)
function calculate_Q(myAmica::SingleModelAmica, i, y_rho)
(n,N) = size(myAmica.source_signals)
m = myAmica.m
Q = zeros(m,N)

for j in 1:m
Q[j,:] .= log(myAmica.learnedParameters.proportions[j,i]) + 0.5 * log(myAmica.learnedParameters.scale[j,i]) .+ logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, :, j])
gg = myAmica.learnedParameters
@views calculate_Q!(Q,gg.proportions[:,i],gg.scale[:,i],gg.shape[:,i],y_rho[i,:,:])
return Q
end

function calculate_Q!(Q,proportions,scale,shape,y_rho)
for j in 1:length(proportions)
Q[j,:] .= log(proportions[j]) + 0.5 * log(scale[j]) .+ logpfun(shape[j], y_rho[ :, j])
end

return Q
end
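# For the component's slice y_rho (samples x mixtures), row j of Q holds the per-sample log of
# proportions[j] * sqrt(scale[j]) * p_GG(y; shape[j]):
#   Q[j,t] = log(proportions[j]) + 0.5*log(scale[j]) - y_rho[t,j] - log(2) - loggamma(1 + 1/shape[j])
# so sum_j exp(Q[j,t]) is the mixture density of sample t for this component.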

#function calculate_Q(myAmica::SingleModelAmica, i, y_rho)
# (n,N) = size(myAmica.source_signals)
# m = myAmica.m
# Q = zeros(m,N)
#
# for k in 1:n
# for j in 1:m
# Q[j,k] = log(myAmica.learnedParameters.proportions[j,i]) +
# 0.5 * log(myAmica.learnedParameters.scale[j,i]) +
# logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, k, j])
# end
# end
#
# return Q
#end

#calculates u but saves it into z. MultiModel also uses the SingleModel version
@views function calculate_u!(myAmica::SingleModelAmica, Q, i)
function calculate_u!(myAmica::SingleModelAmica{T,ncomps,nmix}, Q, i) where {T,ncomps,nmix}
m = size(myAmica.learnedParameters.scale,1)
#scratch_Q = similar(@view(Q[1,:]))
#scratch_Q = similar(Q)
tmp = Array{T}(undef,size(Q,1),size(Q,2))
if m > 1
for j in 1:m
myAmica.z[i, :, j] .= (1 ./ sum(optimized_exp(Q .- Q[j, :]'), dims=1))[:]
# i is component/channel
ixes = 1:m
for j in ixes # 1:3 m mixtures
# I think it is very hard to optimize the Q .- Q[j,:] inner term
# ix = (ixes)[ixes .∈ 2]
tmp .= @view(Q[:,:]) .- @view(Q[j, :])'
#tmp .= exp.(tmp)
#IVM.exp!(tmp)
optimized_exp!(tmp)
sum!( @view(myAmica.z[i,:,j])',tmp)

end
end
myAmica.z[i,:,:] .= 1 ./ @view(myAmica.z[i,:,:])
end
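# After the loop, z[i,t,j] = 1 / sum_k exp(Q[k,t] - Q[j,t]) = exp(Q[j,t]) / sum_k exp(Q[k,t]),
# i.e. the softmax responsibility of mixture j for sample t of component i; working with the
# differences keeps each responsibility in [0,1] even where exp(Q) itself would overflow.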

#Applies location and scale parameter to source signals (per generalized Gaussian)
@views function calculate_y!(myAmica::SingleModelAmica)
for j in 1:myAmica.m
myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals[:,:] .- myAmica.learnedParameters.location[j,:])
end
end

@views function calculate_y!(myAmica::MultiModelAmica)
n = size(myAmica.models[1].A,1)
for h in 1:size(myAmica.models,1)
#=Threads.@threads=# for i in 1:n
for j in 1:myAmica.m
myAmica.models[h].y[i,:,j] = sqrt(myAmica.models[h].learnedParameters.scale[j,i]) * (myAmica.models[h].source_signals[i,:] .- myAmica.models[h].learnedParameters.location[j,i])
end

for j in 1:myAmica.m
myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals .- myAmica.learnedParameters.location[j,:])
end
end
# @debug myAmica.y[1,1,1:3],myAmica.learnedParameters.scale[1:3,1], myAmica.learnedParameters.location[1:3,1]
# for j in 1:myAmica.m
# for i = 1:size(myAmica.source_signals,2)
# for k = 1: size(myAmica.source_signals,1)
# myAmica.y[k,i,j] = sqrt(myAmica.learnedParameters.scale[j,k]) * (myAmica.source_signals[k,i] - myAmica.learnedParameters.location[j,k])
# end
# end
#end


end
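# y[i,t,j] = sqrt(scale[j,i]) * (source_signals[i,t] - location[j,i]): each source sample is
# centred on mixture j's location and rescaled by its scale, so calculate_Q!/logpfun can
# evaluate a unit-scale generalized Gaussian.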

function calculate_y!(myAmica::MultiModelAmica)
calculate_y!.(myAmica.models)
end

#Calculates Likelihood for each time sample and for each ICA model
function calculate_Lt!(myAmica::SingleModelAmica,Q)
m = size(myAmica.learnedParameters.scale,1)
if m > 1
Qmax = ones(m,1).*maximum(Q,dims=1);
myAmica.Lt[:] = myAmica.Lt' .+ Qmax[1,:]' .+ logsumexp(Q - Qmax,dims = 1)
myAmica.Lt .+= logsumexp(Q;dims=1)[1,:]
else
myAmica.Lt[:] = myAmica.Lt .+ Q[1,:]#todo: test
myAmica.Lt[:] = myAmica.Lt .+ Q[1,:] #todo: test
end
end
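# With m > 1 the per-sample contribution is Lt[t] += log(sum_j exp(Q[j,t]));
# LogExpFunctions.logsumexp subtracts the column maximum internally, which is what the
# removed explicit Qmax shift used to do by hand.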

35 changes: 26 additions & 9 deletions src/main.jl
@@ -13,9 +13,9 @@ function fit!(amica::AbstractAmica, data; kwargs...)
end

function amica!(myAmica::AbstractAmica,
data;
lrate = LearningRate(),
shapelrate = LearningRate(;lrate = 0.1,minimum=0.5,maximum=5,init=1.5),
data::AbstractMatrix{T};
lrate = LearningRate{T}(),
shapelrate = LearningRate{T}(;lrate = 0.1,minimum=0.5,maximum=5,init=1.5),
remove_mean = true,
do_sphering = true,
show_progress = true,
@@ -26,9 +26,9 @@ function amica!(myAmica::AbstractAmica,
update_shape = 1,
mindll = 1e-8,

kwargs...)
kwargs...) where {T}

initialize_shape_parameter(myAmica,shapelrate)
initialize_shape_parameter!(myAmica,shapelrate)

(n, N) = size(data)
m = myAmica.m
@@ -54,26 +54,39 @@ function amica!(myAmica::AbstractAmica,
lambda = zeros(n, 1)
prog = ProgressUnknown("Minimizing"; showspeed=true)

y_rho = zeros(size(myAmica.y))
y_rho = similar(myAmica.y)

for iter in 1:maxiter
gg = myAmica.learnedParameters
@debug :scale,gg.scale[2],:location,gg.location[2],:proportions,gg.proportions[2],:shape,gg.shape[2]
@debug println("")
#E-step
#@show typeof(myAmica.A),typeof(data)
@debug :A myAmica.A[1:2,1:2]

@debug :source_signals myAmica.source_signals[1],myAmica.source_signals[end]
update_sources!(myAmica, data)
@debug :source_signals myAmica.source_signals[1],myAmica.source_signals[end]
calculate_ldet!(myAmica)
initialize_Lt!(myAmica)
myAmica.Lt .+= LLdetS
calculate_y!(myAmica)



calculate_y!(myAmica)
@debug :y, myAmica.y[1,1:3,1]
# pre-calculate abs(y)^rho
for j in 1:m
for i in 1:n
y_rho[i,:,j] .= optimized_pow(abs.(myAmica.y[i,:,j]), myAmica.learnedParameters.shape[j,i])
@views y_rho[i,:,j] .= (abs.(myAmica.y[i,:,j]) .^myAmica.learnedParameters.shape[j,i])
end
end
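# y_rho[i,t,j] = abs(y[i,t,j])^shape[j,i], evaluated once per iteration so the E-step in
# loopiloop! (calculate_Q!/logpfun) only reads precomputed values.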



loopiloop!(myAmica, y_rho) #Updates y and Lt. Todo: Rename


calculate_LL!(myAmica)


@@ -106,8 +119,11 @@ function amica!(myAmica::AbstractAmica,
rethrow()
end
end

@debug iter, :A myAmica.A[1:2,1:2]

reparameterize!(myAmica, data)

@debug iter, :A myAmica.A[1:2,1:2]
#Shows current progress
show_progress && ProgressMeter.next!(prog; showvalues=[(:LL, myAmica.LL[iter])])

@@ -120,5 +136,6 @@ function amica!(myAmica::AbstractAmica,
if remove_mean
add_means_back!(myAmica, removed_mean)
end
@debug myAmica.LL
return myAmica
end
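Taken together, a fit with the defaults above looks roughly like the sketch below. Only `fit!`'s signature and the keyword names appear in this diff; the `SingleModelAmica(data)` constructor call and the explicit `maxiter` keyword are assumptions:

    am = SingleModelAmica(data)          # hypothetical constructor
    fit!(am, data; maxiter = 500, mindll = 1e-8,
         do_sphering = true, remove_mean = true, show_progress = true)
    am.LL                                # per-iteration log-likelihood trace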