Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
export nelder_mead

"""
    nelder_mead(nlp)

Minimize `nlp` with the derivative-free Nelder-Mead simplex method.
This implementation follows the algorithm described in chapter 9 of [1].

[1] Numerical Optimization (Jorge Nocedal and Stephen J. Wright), Springer, 2006.

# Keywords
- `x`: starting point (default: `copy(nlp.meta.x0)`).
- `vertices`: optional pre-built simplex of `nvar + 1` points; when left
  unassigned, a default simplex is built by perturbing each coordinate of `x`.
- `tol`: stopping tolerance on `|f(x₁) - f(xₙ₊₁)|`.
- `max_time`: wall-clock limit in seconds.
- `max_eval`: objective-evaluation limit (negative disables the check).

Returns a `GenericExecutionStats` with the best vertex and its objective value.
"""
function nelder_mead(nlp :: AbstractNLPModel;
                     x :: AbstractVector = copy(nlp.meta.x0),
                     vertices :: AbstractArray = Array{typeof(x)}(undef, nlp.meta.nvar + 1),
                     tol :: Real = √eps(eltype(x)),
                     max_time :: Float64 = 30.0,
                     max_eval :: Int = -1)
  n = nlp.meta.nvar

  # Build the default initial simplex when the caller didn't supply one.
  # NOTE: `isassigned` needs an explicit index for arrays; checking slot 1
  # distinguishes a user-provided simplex from the `undef` default.
  if !isassigned(vertices, 1)
    vertices[1] = x
    for j in 1:n
      xt = copy(x)
      # Standard perturbation heuristic: small absolute step at zero,
      # fixed relative-ish step otherwise.
      xt[j] += xt[j] == 0 ? 0.00025 : 0.05
      vertices[j+1] = xt
    end
  end

  # Column 1 holds the vertices, column 2 their objective values.
  # Rows are kept sorted by objective value, best vertex first.
  vertices = hcat(vertices, map(v -> obj(nlp, v), vertices))
  vertices = vertices[sortperm(vertices[:, 2]), :]

  k = 0
  start_time = time()
  el_time = 0.0
  tired = neval_obj(nlp) > max_eval >= 0 || el_time > max_time
  norm_opt = norm(vertices[1, 2] - vertices[n+1, 2])
  optimal = norm_opt < tol
  T = eltype(x)
  status = :unknown

  @info log_header([:iter, :f, :nrm], [Int, T, T],
                   hdr_override=Dict(:f => "f(x)", :nrm => "‖x₁ - xₙ+1‖"))

  while !(optimal || tired)
    shrink = true
    # Centroid of the n best vertices. μ(t) parameterizes the line through the
    # centroid and the worst vertex: t = -1 reflection, t = -2 expansion,
    # t = ∓0.5 outside/inside contraction.
    x_cen = sum(vertices[i, 1] for i in 1:n) / n
    μ(t) = x_cen + t * (vertices[n+1, 1] - x_cen)

    x_ref = μ(-1)
    f_ref = obj(nlp, x_ref)
    @info log_row(Any[k, vertices[1, 2], norm_opt])

    if vertices[1, 2] <= f_ref < vertices[n, 2]
      # Reflection: accept when neither best nor among the worst.
      vertices[n+1, 1] = x_ref
      shrink = false
    elseif f_ref < vertices[1, 2]
      # Expansion: reflection beat the best vertex, so try going further.
      x_exp = μ(-2)
      f_exp = obj(nlp, x_exp)
      vertices[n+1, 1] = f_exp < f_ref ? x_exp : x_ref
      shrink = false
    elseif f_ref >= vertices[n, 2]
      if vertices[n, 2] <= f_ref < vertices[n+1, 2]
        # Outside contraction.
        x_ocn = μ(-0.5)
        if obj(nlp, x_ocn) <= f_ref
          vertices[n+1, 1] = x_ocn
          shrink = false
        end
      else
        # Inside contraction.
        x_icn = μ(0.5)
        if obj(nlp, x_icn) < vertices[n+1, 2]
          vertices[n+1, 1] = x_icn
          shrink = false
        end
      end
    end

    # No candidate accepted: shrink the whole simplex toward the best vertex.
    if shrink
      for i in 2:n+1
        vertices[i, 1] = 0.5 * (vertices[1, 1] + vertices[i, 1])
      end
    end

    # Re-evaluate the objectives and restore the best-first ordering.
    vertices[1:n+1, 2] = map(v -> obj(nlp, v), vertices[1:n+1, 1])
    vertices = vertices[sortperm(vertices[:, 2]), :]

    k += 1
    x = vertices[1, 1]
    # Update elapsed time BEFORE the stopping tests so `max_time` is not
    # checked against a stale value (the original updated it afterwards).
    el_time = time() - start_time
    tired = neval_obj(nlp) > max_eval >= 0 || el_time > max_time
    norm_opt = norm(vertices[1, 2] - vertices[n+1, 2])
    optimal = norm_opt < tol
  end

  if optimal
    status = :acceptable
  elseif tired
    if neval_obj(nlp) > max_eval ≥ 0
      status = :max_eval
    elseif el_time >= max_time
      status = :max_time
    end
  end

  return GenericExecutionStats(status, nlp, solution=vertices[1, 1], objective=vertices[1, 2],
                               iter=k, elapsed_time=el_time)
end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement