1 change: 1 addition & 0 deletions .gitignore
@@ -4,3 +4,4 @@
/Manifest.toml
/docs/build/
/.CondaPkg/
.DS_Store
4 changes: 4 additions & 0 deletions Project.toml
@@ -4,12 +4,16 @@ authors = ["Alexander Lulkin", "Benedikt V. Ehinger"]
version = "0.1.0"

[deps]
AppleAccelerate = "13e28ba4-7ad8-5781-acae-3021b1ed3924"
BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66"
Diagonalizations = "9cd687f3-b62d-43f3-8fd3-ffcd9e581047"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
GaussianMixtures = "cc18c42c-b769-54ff-9e2a-b28141a64aae"
IntelVectorMath = "c8ce9da6-5d36-5c03-b118-5a70151be7bc"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
LoopVectorization = "bdcacae8-1622-11e9-2a5c-532679323890"
MAT = "23992714-dd62-5051-b70f-ba57cb901cac"
Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
4 changes: 4 additions & 0 deletions src/Amica.jl
@@ -3,8 +3,12 @@ module Amica
using LinearAlgebra
using GaussianMixtures
using Distributions
using IntelVectorMath
using SpecialFunctions
using ProgressMeter
using LoopVectorization
using AppleAccelerate

#using ComponentArrays
using Diagonalizations
using LogExpFunctions
58 changes: 48 additions & 10 deletions src/helper.jl
@@ -3,17 +3,18 @@ function removeMean!(input)
mn = mean(input,dims=2)
(n,N) = size(input)
for i in 1:n
input[i,:] = input[i,:] .- mn[i]
input[i,:] .= input[i,:] .- mn[i]
end
return mn
end

#Spheres (whitens) the data x in place and returns the sphering matrix S. todo: replace with function from lib
function sphering(x)
(n,N) = size(x)
Us,Ss,Vs = svd(x*x'/N)
function sphering!(x)
(_,N) = size(x)
Us,Ss = svd(x*x'/N)
S = Us * diagm(vec(1 ./sqrt.(Ss))) * Us'
return x = S*x
x .= S*x
return S
end
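
sphering! whitens the data in place, so its sample covariance becomes the identity, and returns the sphering matrix S whose log|det| later enters the likelihood. A minimal sanity-check sketch using hypothetical toy data (not part of this diff):

using LinearAlgebra, Statistics
x = randn(4, 1000)                      # hypothetical 4-channel toy data
x .-= mean(x, dims=2)                   # remove the mean first, as amica! does
S = sphering!(x)                        # whitens x in place, returns the sphering matrix
x * x' / size(x, 2) ≈ Matrix(I, 4, 4)   # sample covariance is now (numerically) the identity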

function bene_sphering(data)
@@ -33,11 +34,48 @@ end

#taken from amica_a.m
#L = det(A) * mult p(s|θ)
function logpfun(x,rho)
return (.-abs.(x).^rho .- log(2) .- loggamma.(1+1/rho))
function logpfun(rho, y_rho)
return .- y_rho .- log(2) .- loggamma(1 + 1 / rho)
end
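
logpfun is the log-density of a standardized generalized Gaussian, log p(y) = -|y|^ρ - log 2 - log Γ(1 + 1/ρ), with |y|^ρ precomputed as y_rho in main.jl. A quick check that this density normalizes to one; a sketch assuming QuadGK is available (it is not a package dependency):

using QuadGK, SpecialFunctions
rho = 1.5
p(y) = exp(logpfun(rho, abs(y)^rho))    # reconstruct the density from the log-density
quadgk(p, -Inf, Inf)[1]                 # ≈ 1.0 for any shape rho > 0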


#taken from amica_a.m
function ffun(x,rho)
return rho .* sign.(x) .* abs.(x) .^(rho.-1)
end
@views function ffun(x::AbstractArray{T, 1}, rho::T) where {T<:Real}
return @inbounds copysign.(optimized_pow(abs.(x), rho - 1), x) .* rho
end
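
ffun is the score of the generalized-Gaussian exponent, i.e. the derivative d/dy |y|^ρ = ρ sign(y) |y|^(ρ-1). A finite-difference sketch with hypothetical values (not part of this diff):

rho, y = 1.8, [0.4, -1.2]
h = 1e-6
fd = (abs.(y .+ h).^rho .- abs.(y .- h).^rho) ./ (2h)   # central difference of |y|^rho
isapprox(ffun(y, rho), fd; rtol=1e-4)                    # true: ffun matches the derivative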

# optimized power function for different cpu architectures
function optimized_pow(lhs::AbstractArray{T, 1}, rhs::T) where {T<:Real}
optimized_pow(lhs, fill(rhs, length(lhs)))
end

function optimized_pow(lhs::AbstractArray{T, 1}, rhs::AbstractArray{T, 1}) where {T<:Real}
if Sys.iswindows() || Sys.islinux()
return IVM.pow(lhs, rhs)
elseif Sys.isapple()
return AppleAccelerate.pow(lhs, rhs)
else
return lhs .^ rhs
end
end

function optimized_log(val)
if Sys.iswindows() || Sys.islinux()
return IVM.log(val)
elseif Sys.isapple()
return AppleAccelerate.log(val)
else
return log.(val)
end
end


function optimized_exp(val)
if Sys.iswindows() || Sys.islinux()
return IVM.exp(val)
elseif Sys.isapple()
return AppleAccelerate.exp(val)
else
return exp.(val)
end
end
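
optimized_pow, optimized_log and optimized_exp pick a vectorized math backend at runtime: IntelVectorMath on Windows and Linux, AppleAccelerate on macOS, and plain broadcasting as the fallback. All backends should agree; a small usage sketch with hypothetical values (not part of this diff):

x = abs.(randn(1000))
r = 1.7
optimized_pow(x, r) ≈ x .^ r            # the scalar exponent is expanded to a vector internally
optimized_log(x) ≈ log.(x)
optimized_exp(x) ≈ exp.(x)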
91 changes: 31 additions & 60 deletions src/likelihood.jl
@@ -24,96 +24,67 @@ function calculate_LL!(myAmica::MultiModelAmica)
end

#Update loop for Lt and u (which is saved in z). Todo: Rename
function loopiloop(myAmica::SingleModelAmica)
(n,N) = size(myAmica.source_signals)
m = myAmica.m
Q = zeros(m,N)
@debug (:prop,myAmica.learnedParameters.proportions[1,1],:shape,myAmica.learnedParameters.shape[1],:y,myAmica.y[1])
function loopiloop!(myAmica::SingleModelAmica, y_rho)
(n,_) = size(myAmica.source_signals)

for i in 1:n
Q = calculate_Q(myAmica,Q,i) # myAmica.Q
Q = calculate_Q(myAmica,i, y_rho) # myAmica.Q
calculate_u!(myAmica,Q,i) # myAmica.z
@debug (:z,myAmica.z[1,1:5,1])
calculate_Lt!(myAmica,Q) # myAmica.Q
end
@debug (:Q,Q[1,1])
end

function loopiloop(myAmica::MultiModelAmica)
function loopiloop!(myAmica::MultiModelAmica, y_rho)
M = size(myAmica.models,1)
(n,N) = size(myAmica.models[1].source_signals)
m = myAmica.m
Q = zeros(m,N)
(n,_) = size(myAmica.models[1].source_signals)

for h in 1:M #run along models
Threads.@threads for i in 1:n #run along components
#uses SingleModel versions of functions
Q = calculate_Q(myAmica.models[h],Q,i)
calculate_u!(myAmica.models[h],Q,i)
calculate_Lt!(myAmica.models[h],Q)
Q = calculate_Q(myAmica.models[h], i, y_rho)
calculate_u!(myAmica.models[h], Q, i)
calculate_Lt!(myAmica.models[h], Q)
end
end
end

#Applies location and scale parameter to source signals (per generalized Gaussian)
function calculate_y!(myAmica::SingleModelAmica)
n = size(myAmica.A,1)
Threads.@threads for i in 1:n
for j in 1:myAmica.m
myAmica.y[i,:,j] = sqrt(myAmica.learnedParameters.scale[j,i]) * (myAmica.source_signals[i,:] .- myAmica.learnedParameters.location[j,i])
end
@views function calculate_Q(myAmica::SingleModelAmica, i, y_rho)
(n,N) = size(myAmica.source_signals)
m = myAmica.m
Q = zeros(m,N)

for j in 1:m
Q[j,:] .= log(myAmica.learnedParameters.proportions[j,i]) + 0.5 * log(myAmica.learnedParameters.scale[j,i]) .+ logpfun(myAmica.learnedParameters.shape[j,i], y_rho[i, :, j])
end
end

function calculate_y!(myAmica::MultiModelAmica)
n = size(myAmica.models[1].A,1)
for h in 1:size(myAmica.models,1)
#=Threads.@threads=# for i in 1:n
for j in 1:myAmica.m
myAmica.models[h].y[i,:,j] = sqrt(myAmica.models[h].learnedParameters.scale[j,i]) * (myAmica.models[h].source_signals[i,:] .- myAmica.models[h].learnedParameters.location[j,i])
end
end
end
return Q
end

#calculates u but saves it into z. MultiModel also uses the SingleModel version
function calculate_u!(myAmica::SingleModelAmica, Q, i)
@views function calculate_u!(myAmica::SingleModelAmica, Q, i)
m = size(myAmica.learnedParameters.scale,1)
if m > 1
for j in 1:m
Qj = ones(m,1) .* Q[j,:]'
myAmica.z[i,:,j] = 1 ./ sum(exp.(Q-Qj),dims = 1)
myAmica.z[i, :, j] .= (1 ./ sum(optimized_exp(Q .- Q[j, :]'), dims=1))[:]
end
end
end
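
The z update is a per-sample softmax over the m mixture components: z[i,t,j] = exp(Q[j,t]) / Σ_k exp(Q[k,t]), written here as 1 / Σ_k exp(Q[k,t] - Q[j,t]) so that no unnormalized exp(Q) has to be stored. A sketch of that identity with hypothetical values (not part of this diff):

Q = randn(3, 5)                                             # m = 3 mixtures, N = 5 samples
z_direct  = exp.(Q) ./ sum(exp.(Q), dims=1)                 # plain softmax over mixtures
z_shifted = [1 / sum(exp.(Q[:, t] .- Q[j, t])) for j in 1:3, t in 1:5]
z_direct ≈ z_shifted                                        # identical results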

#no longer in use, multimodel also uses the singlemodel version (see loopiloop)
function calculate_u!(myAmica::MultiModelAmica,Q,i,h)
m = myAmica.m

if m > 1
for j in 1:m
Qj = ones(m,1) .* Q[j,:]'
myAmica.models[h].z[i,:,j] = 1 ./ sum(exp.(Q-Qj),dims = 1)
#Applies location and scale parameter to source signals (per generalized Gaussian)
@views function calculate_y!(myAmica::SingleModelAmica)
#for j in 1:myAmica.m
# myAmica.y[:,:,j] .= sqrt.(myAmica.learnedParameters.scale[j,:]) .* (myAmica.source_signals .- myAmica.learnedParameters.location[j,:])
#end
for j in 1:myAmica.m
for k = 1:size(myAmica.source_signals,1)
for i = 1:size(myAmica.source_signals,2)
myAmica.y[k,i,j] = sqrt(myAmica.learnedParameters.scale[j,k]) * (myAmica.source_signals[k,i] - myAmica.learnedParameters.location[j,k])
end
end
end
end

#Calculates densities for each generalized Gaussian j. Currently used by MultiModelAmica too
function calculate_Q(myAmica::SingleModelAmica, Q, i)
m = size(myAmica.learnedParameters.scale, 1) #m = number of GGs, can't use myAmica.m in case this gets used by MultiModelAmica

for j in 1:m
Q[j,:] = log(myAmica.learnedParameters.proportions[j,i]) + 0.5*log(myAmica.learnedParameters.scale[j,i]) .+ logpfun(myAmica.y[i,:,j],myAmica.learnedParameters.shape[j,i])
end
return Q
end

#no longer in use, Q is no longer saved in Amica object but instead in loopiloop
function calculate_Q!(myAmica::MultiModelAmica, i, h)
m = myAmica.m
for j in 1:m
myAmica.Q[j,:] = log(myAmica.models[h].learnedParameters.proportions[j,i]) + 0.5*log(myAmica.models[h].learnedParameters.scale[j,i]) .+ logpfun(myAmica.models[h].y[i,:,j],myAmica.models[h].learnedParameters.shape[j,i])
end
function calculate_y!(myAmica::MultiModelAmica)
calculate_y!.(myAmica.models)
end

#Calculates Likelihood for each time sample and for each ICA model
34 changes: 27 additions & 7 deletions src/main.jl
@@ -33,32 +33,50 @@ function amica!(myAmica::AbstractAmica,
(n, N) = size(data)
m = myAmica.m

println("m: $(m), n: $(n), N: $(N)")

#Prepares data by removing means and/or sphering
if remove_mean
removed_mean = removeMean!(data)
end
if do_sphering
data = sphering(data)
S = sphering!(data)
myAmica.S = S
LLdetS = logabsdet(S)[1]
else
myAmica.S = I
LLdetS = 0
end
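# Note (sketch): when the data are sphered, the model is fit to the whitened signal S*data,
# so the log-likelihood of the original data differs by the constant log|det S| per sample;
# that is why logabsdet(S)[1] is added to Lt in every iteration below.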

dLL = zeros(1, maxiter)
fp = zeros(m ,N)

#todo put them into object
lambda = zeros(n, 1)
kappa = zeros(n, 1)
sigma2 = zeros(n, 1)

prog = ProgressUnknown("Minimizing"; showspeed=true)

y_rho = zeros(size(myAmica.y))

for iter in 1:maxiter
#E-step
update_sources!(myAmica, data)
calculate_ldet!(myAmica)
initialize_Lt!(myAmica)
myAmica.Lt .+= LLdetS
calculate_y!(myAmica)
loopiloop(myAmica) #Updates y and Lt. Todo: Rename

# pre-calculate abs(y)^rho
for j in 1:m
for i in 1:n
y_rho[i,:,j] .= optimized_pow(abs.(myAmica.y[i,:,j]), myAmica.learnedParameters.shape[j,i])
end
end
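# Note (sketch): the elementwise power |y|^rho is typically the dominant per-iteration cost,
# and the same values are needed again in the M-step, so it is computed once here with the
# vectorized optimized_pow and passed as y_rho to loopiloop! and update_loop!.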



loopiloop!(myAmica, y_rho) #Updates y and Lt. Todo: Rename
calculate_LL!(myAmica)


@debug (:LL,myAmica.LL)
#Calculate difference in loglikelihood between iterations
if iter > 1
@@ -78,7 +96,7 @@
#M-step
try
#Updates parameters and mixing matrix
update_loop!(myAmica, fp, lambda, shapelrate, update_shape, iter, do_newton, newt_start_iter, lrate)
update_loop!(myAmica, lambda, y_rho, shapelrate, update_shape, iter, do_newton, newt_start_iter, lrate)
catch e
#Terminates if NaNs are detected in parameters
if isa(e,AmicaNaNException)
Expand All @@ -96,6 +114,8 @@ function amica!(myAmica::AbstractAmica,
end
#If parameters contain NaNs, the algorithm skips the A update and terminates by jumping here
@label escape_from_NaN


#If means were removed, they are added back
if remove_mean
add_means_back!(myAmica, removed_mean)
22 changes: 11 additions & 11 deletions src/newton.jl
@@ -7,8 +7,8 @@ function newton_method!(myAmica::SingleModelAmica, iter, g, kappa, do_newton, ne

sigma2 = sum(myAmica.source_signals.^2,dims=2) / N #is probably called sigma2 cause always squared

dA = Matrix{Float64}(I, n, n) - g * myAmica.source_signals[:,:]'
bflag = 0
dA = Matrix{Float64}(I, n, n) - g * myAmica.source_signals'
bflag = false
B = zeros(n,n)

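# Note (sketch): dA = I - g*s' is the relative-gradient term; the loop below builds the
# Newton correction B componentwise. If every curvature denominator is positive (bflag stays
# false) the Newton-type step A += lrate*A*B is taken, otherwise the code falls back to the
# natural-gradient step A -= lnatrate*A*dA.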
for i in 1:n
@@ -20,28 +20,28 @@ if denom > 0
if denom > 0
B[i,k] = (-kappa[k] * sigma2[i] * dA[i,k] + dA[k,i]) / denom
else
bflag = 1
bflag = true
end
end
end
end
if (bflag == 0) && (do_newton == 1) && (iter > newt_start_iter)
myAmica.A[:,:] = myAmica.A[:,:] + lrate * myAmica.A[:,:] * B
if (bflag == false) && (do_newton == 1) && (iter > newt_start_iter)
myAmica.A += lrate * myAmica.A * B
else
myAmica.A[:,:] = myAmica.A[:,:] - lnatrate * myAmica.A[:,:] * dA
myAmica.A -= lnatrate * myAmica.A * dA
end
end

function newton_method!(myAmica::MultiModelAmica, h, iter, g, kappa, do_newton, newt_start_iter, lrate::LearningRate, lambda)
@views function newton_method!(myAmica::MultiModelAmica, h, iter, g, kappa, do_newton, newt_start_iter, lrate::LearningRate, lambda)

lnatrate = lrate.natural_rate
lrate = lrate.lrate
M = size(myAmica.models)
(n,N) = size(myAmica.models[1].source_signals)

sigma2 = myAmica.models[h].source_signals[:,:].^2 * myAmica.ica_weights_per_sample[h,:] /myAmica.ica_weights[h]
sigma2 = myAmica.models[h].source_signals.^2 * myAmica.ica_weights_per_sample[h,:] /myAmica.ica_weights[h]

dA = Matrix{Float64}(I, n, n) - g * myAmica.models[h].source_signals[:,:]'
dA = Matrix{Float64}(I, n, n) - g * myAmica.models[h].source_signals'
bflag = 0
B = zeros(n,n)

Expand All @@ -60,8 +60,8 @@ function newton_method!(myAmica::MultiModelAmica, h, iter, g, kappa, do_newton,
end
end
if (bflag == 0) && (do_newton == 1) && (iter > newt_start_iter)
myAmica.models[h].A[:,:] = myAmica.models[h].A[:,:] + lrate * myAmica.models[h].A[:,:] * B
myAmica.models[h].A = myAmica.models[h].A + lrate * myAmica.models[h].A * B
else
myAmica.models[h].A[:,:] = myAmica.models[h].A[:,:] - lnatrate * myAmica.models[h].A[:,:] * dA
myAmica.models[h].A = myAmica.models[h].A - lnatrate * myAmica.models[h].A * dA
end
end