diff --git a/src/helper.jl b/src/helper.jl
index 306314f..3cd3c77 100755
--- a/src/helper.jl
+++ b/src/helper.jl
@@ -83,16 +83,16 @@ function calculate_z_y_Lt!(myAmica,h)
 end
 
 
 function get_sources!(myAmica::AbstractAmica, data, h)
 	b = myAmica.source_signals
 	M = myAmica.M
 	n = myAmica.n
 	if M == 1
 		b = pinv(myAmica.A[:,:,h]) * data
-	end
-	for i in 1:n
-		if M > 1
-			Wh = pinv(myAmica.A[:,:,h])
-			b[i,:,h] = Wh[i,:]' * data .- Wh[i,:]' * myAmica.centers[:,h]
+	else
+		Wh = pinv(myAmica.A[:,:,h]) # loop-invariant: compute the unmixing matrix once
+		for i in 1:n
+			b[i,:,h] = Wh[i,:]' * data .- Wh[i,:]' * myAmica.centers[:,h]
+		end
 	end
 	myAmica.source_signals[:,:,:] = b
diff --git a/src/likelihood.jl b/src/likelihood.jl
index 8b5cf88..a880083 100755
--- a/src/likelihood.jl
+++ b/src/likelihood.jl
@@ -26,15 +26,30 @@ function ffun(x,rho) #taken from amica.m
 	return rho * sign.(x) .* abs.(x) .^(rho-1)
 end
 
+# matrix method: one column of mixture parameters per source, one row of data per
+# source; returns an (nsamples × nsources) matrix of per-sample loglikelihoods
+function loglikelihoodMMGG(loc::AbstractMatrix,scale::AbstractMatrix,shape::AbstractMatrix,mixtureproportions::AbstractMatrix,data::AbstractMatrix)
+	return hcat(loglikelihoodMMGG.(eachcol(loc),
+		eachcol(scale),
+		eachcol(shape),
+		eachcol(mixtureproportions),
+		eachrow(data))...)
+end
+
+function loglikelihoodMMGG(location::AbstractVector,scale::AbstractVector,shape::AbstractVector,mixtureproportions::AbstractVector,data::AbstractVector)
+	MM = MMGG(location,scale,shape,mixtureproportions)
+	return loglikelihood.(MM,data)
+end
 
 # calculate loglikelihood for each sample in vector x, given a parameterization of a mixture of PGeneralizedGaussians
-function loglikelihoodMMGG(μ::AbstractVector,α::AbstractVector,ρ::AbstractVector,data::AbstractVector,π::AbstractVector)
+function MMGG(location::AbstractVector,scale::AbstractVector,shape::AbstractVector,mixtureproportions::AbstractVector)
 	# take the vectors of μ,α,ρ and generate a GG from each
-	GGvec = PGeneralizedGaussian.(μ,α,ρ)
-	MM = MixtureModel(GGvec,Vector(π)) # make it a mixture model with prior probabilities π
-	return loglikelihood.(MM,data) # apply the loglikelihood to each sample individually (note the "." infront of .(MM,x))
+	GGvec = PGeneralizedGaussian.(location,scale,shape;check_args=false)
+	MM = MixtureModel(GGvec,Vector(mixtureproportions)) # make it a mixture model with prior probabilities π
+	return MM
+	# return loglikelihood.(MM,data) # apply the loglikelihood to each sample individually (note the "." in front of .(MM,x))
 end
 
 
 function calculate_Lt!(myAmica, h)
 	myAmica.ldet[h] = -log(abs(det(myAmica.A[:,:,h])))
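A minimal usage sketch of the refactored pair (not part of the diff; the parameter values are invented): MMGG now returns the mixture object itself, while loglikelihoodMMGG evaluates it per sample.

using Distributions
using Amica

location = [0.0, 0.0]   # one entry per mixture component
scale    = [1.0, 2.0]
shape    = [2.0, 2.0]   # shape 2 makes each component Gaussian
props    = [0.6, 0.4]   # mixture proportions, summing to 1

MM = Amica.MMGG(location, scale, shape, props)      # the mixture distribution itself
ll = Amica.loglikelihoodMMGG(location, scale, shape, props, randn(10))  # per-sample loglikelihoods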
Stop at iteration ", iter) break @@ -130,7 +131,7 @@ function amica!(myAmica::AbstractAmica, #@show A #@show LL[iter] - show_progress && ProgressMeter.next!(prog; showvalues=[(:LL, myAmica.LL[iter])]) + show_progress && ProgressMeter.next!(prog; showvalues=[(:LL, myAmica.LL[iter]),(:lrate, lrate.lrate)]) end diff --git a/test/bene_testAmica.jl b/test/bene_testAmica.jl index dccecb6..22fff05 100644 --- a/test/bene_testAmica.jl +++ b/test/bene_testAmica.jl @@ -16,7 +16,7 @@ x = A*s #A = [1 1 0 1; 1 1 0 0; 1 0 1 1; 0 0 0 1] #x = hcat(x,A*s) -am = fit(MultiModelAmica,x;maxiter=500,M=1) +am = fit(MultiModelAmica,x;maxiter=2000,M=1) size(am.A) W = inv(am.A[:,:,1]) #previously [:,:,2] @@ -32,8 +32,9 @@ series(f[2,1],W*x) ax,h = heatmap(f[2,2],am.A[:,:,1]) Colorbar(f[2,3],h) -series(f[4,1],x[:,1:500]) -series(f[4,2],(W*x)[:,1:500]) +series(f[3,1],am.source_signals[:,1:200,1]) +series(f[4,1],x[:,1:200]) +series(f[4,2],(W*x)[:,1:200]) f #series(f[4,1],inv(W)'*x) #series(f[6,1],W'*x) diff --git a/test/test_GGMM.jl b/test/test_GGMM.jl new file mode 100644 index 0000000..b3fe233 --- /dev/null +++ b/test/test_GGMM.jl @@ -0,0 +1,81 @@ +#--- +using CairoMakie +using SignalAnalysis +using Amica +#t = range(0,20*π,length=1000) +s =rand(PinkGaussian(length(t)),4)' +#s[2,:] = sin.(t) +#s[3,:] = sin.(2 .* t) +#s[4,:] = sin.(10 .* t) +#s = s .* [1,2,3,4] +function fill_gmm(n) +gm = rand(GMM,3,2) +return Vector(rand(MixtureModel(gm),n)[1,:]) +end +#A = rand(size(s,1),size(s,1)) +s[2,:] = fill_gmm(1000) +s[3,:] = fill_gmm(1000) +s[4,:] = fill_gmm(1000) + +A = [1 1 0 0; 0 1 1 0; 0 0 1 1; 1 0 1 0] + +x = A*s + +#A = [1 1 0 1; 1 1 0 0; 1 0 1 1; 0 0 0 1] +#x = hcat(x,A*s) + +am = fit(MultiModelAmica,x;maxiter=2000,M=1) + +#--- +scale = am.learnedParameters.β #scale => GG.alpha +mixtureproportions = am.learnedParameters.α #GG.prior +shape = am.learnedParameters.ρ #shape => GG.rho +location = am.learnedParameters.μ #location +data = am.source_signals + +l = Amica.loglikelihoodMMGG( location[:,:,1], + scale[:,:,1], + shape[:,:,1], + mixtureproportions[:,:,1], + data[:,:,1]) + + + + +MM = Amica.MMGG( location[:,1,1], +scale[:,1,1], +shape[:,1,1], +mixtureproportions[:,1,1], +data[1,:,1]) + +#--- +f = Figure() +ax =f[1,1] = Axis(f) +for n = 1:4 +MM = Amica.MMGG( location[:,n,1], +#[1,1,1.], +(scale[:,n,1]), +shape[:,n,1], + mixtureproportions[:,n,1]) + +x_t = -5:0.1:5 + +lines!(x_t,pdf.(Ref(MM),x_t)) +end +n = 3 +hist!(data[n,:,1],bins=x_t;normalization=:pdf,color=RGBAf(1,0,0,0.2)) +current_figure() +#--- +#= +j = 1 +i = 1 +h = 1 + +Q = zeros(3,1000) +for j in 1:3 + +Q[j,:] = log.(am.learnedParameters.α[j,i,h]) + 0.5*log.(am.learnedParameters.β[j,i,h]) .+ Amica.logpfun(am.y[i,:,j,h],am.learnedParameters.ρ[j,i,h]) +end +Qmax = ones(3,1).*maximum(Q,dims=1); +Qmax[1,:]' .+ log.(sum(exp.(am.Q - Qmax),dims = 1)) +=# \ No newline at end of file diff --git a/test/test_zygote.jl b/test/test_zygote.jl new file mode 100644 index 0000000..4e1c556 --- /dev/null +++ b/test/test_zygote.jl @@ -0,0 +1,56 @@ +using Amica +using LinearAlgebra +t = range(0,20*π,length=1000) +s =rand(PinkGaussian(length(t)),4)' +s[2,:] = sin.(t) +s[3,:] = sin.(2 .* t) +s[4,:] = sin.(10 .* t) +s = s .* [1,2,3,4] +#A = rand(size(s,1),size(s,1)) +A_org = [1 1 0 0; 0 1 1 0; 0 0 1 1; 1 0 1 0] + +x = A_org*s + + +am = MultiModelAmica(x) + +scale = am.learnedParameters.β[:,:,1] #scale => GG.alpha +mixtureproportions = am.learnedParameters.α[:,:,1] #GG.prior +shape = am.learnedParameters.ρ[:,:,1] #shape => GG.rho +location = am.learnedParameters.μ[:,:,1] #location 
diff --git a/test/test_zygote.jl b/test/test_zygote.jl
new file mode 100644
index 0000000..4e1c556
--- /dev/null
+++ b/test/test_zygote.jl
@@ -0,0 +1,60 @@
+using Amica
+using LinearAlgebra
+using SignalAnalysis
+
+t = range(0,20*π,length=1000)
+s = rand(PinkGaussian(length(t)),4)'
+s[2,:] = sin.(t)
+s[3,:] = sin.(2 .* t)
+s[4,:] = sin.(10 .* t)
+s = s .* [1,2,3,4]
+#A = rand(size(s,1),size(s,1))
+A_org = [1 1 0 0; 0 1 1 0; 0 0 1 1; 1 0 1 0]
+
+x = A_org*s
+
+am = MultiModelAmica(x)
+
+scale = am.learnedParameters.β[:,:,1] #scale => GG.alpha
+mixtureproportions = am.learnedParameters.α[:,:,1] #GG.prior
+shape = am.learnedParameters.ρ[:,:,1] #shape => GG.rho
+location = am.learnedParameters.μ[:,:,1] #location
+A = am.A[:,:,1]
+
+using Optimization
+
+# Optimization.jl calls the objective as f(u,p); the parameters p are unused here
+myFun(pa,tmp) = myFun(pa)
+
+function myFun(pa)
+	sources = inv(pa.A)*x
+	# loglikelihood of x = A*s is sum(source loglikelihoods) - nsamples*log|det(A)|
+	# (matching the ldet convention in calculate_Lt!); we minimize its negation
+	L1 = -size(x,2) * log(abs(det(pa.A)))
+	mp = pa.mixtureproportions ./ sum(pa.mixtureproportions,dims=1)
+	#@show mp
+	L2 = Amica.loglikelihoodMMGG(pa.location,
+		pa.scale,
+		pa.shape,
+		mp,
+		sources)
+	return -(L1+sum(L2))
+end
+
+using OptimizationOptimJL
+using ComponentArrays
+using Zygote
+
+para = ComponentArray(;A,location,scale,shape,mixtureproportions)
+lb = similar(para)
+lb .= -Inf
+ub = .- deepcopy(lb)
+
+lb.scale .= 0.0001
+lb.shape .= 0.0001
+lb.mixtureproportions .= 0
+ub.mixtureproportions .= 1
+
+using ModelingToolkit
+df = OptimizationProblem(OptimizationFunction(myFun, Optimization.AutoZygote()), para, []; lb = lb, ub = ub)
+sol = solve(df,LBFGS(),maxtime = 180)
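A possible follow-up check once `sol` has converged (a sketch, assuming the ComponentArray structure survives the solve and `A_org` is still in scope): ICA recovers the mixing matrix only up to permutation and scaling of the sources, so inv(A_hat)*A_org should approach a scaled permutation matrix rather than the identity.

A_hat = sol.u.A                  # learned mixing-matrix block
P = inv(A_hat) * A_org
# if unmixing worked, each row of P is dominated by a single entry
dominance = vec(maximum(abs.(P), dims=2) ./ sum(abs.(P), dims=2))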