Skip to content

Commit f5d0512

Browse files
committed
Refactor names for clarity
1 parent fb5e434 commit f5d0512

File tree

1 file changed

+15
-13
lines changed

1 file changed

+15
-13
lines changed

src/selections.jl

Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# GA seclections
1+
# GA selections
22
# ==============
33

44
# Rank-based fitness assignment
@@ -7,12 +7,14 @@ function ranklinear(sp::Float64)
77
@assert 1.0 <= sp <= 2.0 "Selective pressure has to be in range [1.0, 2.0]."
88
function rank(fitness::Vector{<:Real}, N::Int)
99
λ = length(fitness)
10-
idx = sortperm(fitness)
11-
ranks = zeros(λ)
10+
rank = sortperm(fitness)
11+
12+
prob = Vector{Float64}(undef, λ)
1213
for i in 1:λ
13-
ranks[i] = ( 2 - sp + 2*(sp - 1)*(idx[i] - 1) / (λ - 1) ) / λ
14+
prob[i] = ( 2.0 - sp + 2.0*(sp - 1.0)*(rank[i] - 1.0) / (λ - 1.0) ) / λ
1415
end
15-
return pselection(ranks, N)
16+
17+
return pselection(prob, N)
1618
end
1719
return rank
1820
end
@@ -21,13 +23,10 @@ end
2123
function uniformranking(μ::Int)
2224
function uniformrank(fitness::Vector{<:Real}, N::Int)
2325
λ = length(fitness)
24-
idx = sortperm(fitness, rev=true)
25-
@assert μ < λ "μ should be less then $(λ)"
26-
ranks = similar(fitness, Float64)
27-
for i in 1:μ
28-
ranks[idx[i]] = 1/μ
29-
end
30-
return pselection(ranks, N)
26+
@assert μ < λ "μ should be less than $(λ)"
27+
28+
prob = fill(1/μ, μ)
29+
return pselection(prob, N)
3130
end
3231
return uniformrank
3332
end
@@ -40,11 +39,13 @@ end
4039

4140
# Stochastic universal sampling (SUS)
4241
function sus(fitness::Vector{<:Real}, N::Int)
42+
selected = Vector{Int}(undef, N)
43+
4344
F = sum(fitness)
4445
P = F/N
46+
4547
start = P*rand()
4648
pointers = [start+P*i for i = 0:(N-1)]
47-
selected = Vector{Int}(undef, N)
4849
i = c = 1
4950
for P in pointers
5051
while sum(fitness[1:i]) < P
@@ -53,6 +54,7 @@ function sus(fitness::Vector{<:Real}, N::Int)
5354
selected[c] = i
5455
c += 1
5556
end
57+
5658
return selected
5759
end
5860

0 commit comments

Comments
 (0)