Simple examples

This tutorial was generated using Literate.jl.

This tutorial is a collection of examples of small nonlinear programs.

Required packages

This tutorial uses the following packages:

using JuMP
import Ipopt
import Random
import Statistics
import Test

The Rosenbrock function

A nonlinear program that minimizes the classical Rosenbrock function.
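
In symbols, the problem is

\min_{x, y} \; (1 - x)^2 + 100 (y - x^2)^2,

whose global minimum of 0 is attained at (x, y) = (1, 1); the tests below check that Ipopt recovers this point.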

function example_rosenbrock()
    model = Model(Ipopt.Optimizer)
    set_silent(model)
    @variable(model, x)
    @variable(model, y)
    @objective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2)
    optimize!(model)
    Test.@test is_solved_and_feasible(model)
    Test.@test objective_value(model) ≈ 0.0 atol = 1e-10
    Test.@test value(x) ≈ 1.0
    Test.@test value(y) ≈ 1.0
    return
end

example_rosenbrock()
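
Ipopt is a local solver, so on nonconvex problems the starting point can matter. As a minimal sketch (the function name example_rosenbrock_with_start and the classical starting point (-1.2, 1.0) are chosen here for illustration and are not part of the original example), the start keyword passes an initial primal guess to the solver; the maximum likelihood example later in this tutorial uses the same keyword:

function example_rosenbrock_with_start()
    model = Model(Ipopt.Optimizer)
    set_silent(model)
    # `start` provides an initial primal guess for each variable to Ipopt.
    @variable(model, x, start = -1.2)
    @variable(model, y, start = 1.0)
    @objective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2)
    optimize!(model)
    return value(x), value(y)
end

example_rosenbrock_with_start()

From this starting point Ipopt should still reach (1, 1), but on harder nonconvex problems a different start can lead to a different local solution.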

The clnlbeam problem

Based on an AMPL model by Hande Y. Benson

Copyright (C) 2001 Princeton University All Rights Reserved

Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that the copyright notice and this permission notice appear in all supporting documentation.

Source:

H. Maurer and H.D. Mittelmann, "The non-linear beam via optimal control with bounded state variables," Optimal Control Applications and Methods 12, pp. 19-31, 1991.
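
The code below implements the following discretized optimal control problem, restated here from the JuMP model for readability. The two families of equality constraints are a trapezoidal discretization of the ODEs x' = \sin(t) and t' = u (in the original beam model, t is the angle and x the displacement of the beam):

\min \; \sum_{i=1}^{N} \frac{h}{2} \left(u_{i+1}^2 + u_i^2\right) + \frac{\alpha h}{2} \left(\cos(t_{i+1}) + \cos(t_i)\right)

subject to, for i = 1, \ldots, N,

x_{i+1} - x_i - \frac{h}{2} \left(\sin(t_{i+1}) + \sin(t_i)\right) = 0, \qquad t_{i+1} - t_i - \frac{h}{2} \left(u_{i+1} + u_i\right) = 0,

with -1 \le t_i \le 1 and -0.05 \le x_i \le 0.05, where N = 1000, h = 1 / N, and \alpha = 350.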

function example_clnlbeam()
    N = 1000
    h = 1 / N
    alpha = 350
    model = Model(Ipopt.Optimizer)
    @variables(model, begin
        -1 <= t[1:(N+1)] <= 1
        -0.05 <= x[1:(N+1)] <= 0.05
        u[1:(N+1)]
    end)
    @objective(
        model,
        Min,
        sum(
            0.5 * h * (u[i+1]^2 + u[i]^2) +
            0.5 * alpha * h * (cos(t[i+1]) + cos(t[i])) for i in 1:N
        ),
    )
    @constraint(
        model,
        [i = 1:N],
        x[i+1] - x[i] - 0.5 * h * (sin(t[i+1]) + sin(t[i])) == 0,
    )
    @constraint(
        model,
        [i = 1:N],
        t[i+1] - t[i] - 0.5 * h * u[i+1] - 0.5 * h * u[i] == 0,
    )
    optimize!(model)
    println("""
    termination_status = $(termination_status(model))
    primal_status      = $(primal_status(model))
    objective_value    = $(objective_value(model))
    """)
    Test.@test is_solved_and_feasible(model)
    return
end

example_clnlbeam()
This is Ipopt version 3.14.16, running with linear solver MUMPS 5.7.3.

Number of nonzeros in equality constraint Jacobian...:     8000
Number of nonzeros in inequality constraint Jacobian.:        0
Number of nonzeros in Lagrangian Hessian.............:     4002

Total number of variables............................:     3003
                     variables with only lower bounds:        0
                variables with lower and upper bounds:     2002
                     variables with only upper bounds:        0
Total number of equality constraints.................:     2000
Total number of inequality constraints...............:        0
        inequality constraints with only lower bounds:        0
   inequality constraints with lower and upper bounds:        0
        inequality constraints with only upper bounds:        0

iter    objective    inf_pr   inf_du lg(mu)  ||d||  lg(rg) alpha_du alpha_pr  ls
   0  3.5000000e+02 0.00e+00 0.00e+00  -1.0 0.00e+00    -  0.00e+00 0.00e+00   0
   1  3.5000000e+02 0.00e+00 0.00e+00  -1.7 0.00e+00    -  1.00e+00 1.00e+00   0
   2  3.5000000e+02 0.00e+00 0.00e+00  -3.8 0.00e+00  -2.0 1.00e+00 1.00e+00T  0
   3  3.5000000e+02 0.00e+00 0.00e+00  -5.7 0.00e+00   0.2 1.00e+00 1.00e+00T  0
   4  3.5000000e+02 0.00e+00 0.00e+00  -8.6 0.00e+00  -0.2 1.00e+00 1.00e+00T  0

Number of Iterations....: 4

                                   (scaled)                 (unscaled)
Objective...............:   3.5000000000000318e+02    3.5000000000000318e+02
Dual infeasibility......:   0.0000000000000000e+00    0.0000000000000000e+00
Constraint violation....:   0.0000000000000000e+00    0.0000000000000000e+00
Variable bound violation:   0.0000000000000000e+00    0.0000000000000000e+00
Complementarity.........:   2.5059035596802450e-09    2.5059035596802450e-09
Overall NLP error.......:   2.5059035596802450e-09    2.5059035596802450e-09


Number of objective function evaluations             = 5
Number of objective gradient evaluations             = 5
Number of equality constraint evaluations            = 5
Number of inequality constraint evaluations          = 0
Number of equality constraint Jacobian evaluations   = 5
Number of inequality constraint Jacobian evaluations = 0
Number of Lagrangian Hessian evaluations             = 4
Total seconds in IPOPT                               = 0.029

EXIT: Optimal Solution Found.
termination_status = LOCALLY_SOLVED
primal_status      = FEASIBLE_POINT
objective_value    = 350.0000000000032

Maximum likelihood estimation

This example uses nonlinear optimization to compute the maximum likelihood estimate (MLE) of the parameters of a normal distribution, that is, the sample mean and variance.
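
The objective maximized below is the log-likelihood of n independent observations x_1, \ldots, x_n drawn from a normal distribution with mean \mu and standard deviation \sigma:

\ell(\mu, \sigma) = \frac{n}{2} \log\left(\frac{1}{2 \pi \sigma^2}\right) - \frac{\sum_{i=1}^{n} (x_i - \mu)^2}{2 \sigma^2}

Its unconstrained maximizers are the sample mean and the 1/n-scaled sample variance; Statistics.var divides by n - 1 instead, which is one reason the test on \sigma^2 uses a looser tolerance.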

function example_mle()
    n = 1_000
    Random.seed!(1234)
    data = randn(n)
    model = Model(Ipopt.Optimizer)
    set_silent(model)
    @variable(model, μ, start = 0.0)
    @variable(model, σ >= 0.0, start = 1.0)
    @objective(
        model,
        Max,
        n / 2 * log(1 / (2 * π * σ^2)) -
        sum((data[i] - μ)^2 for i in 1:n) / (2 * σ^2)
    )
    optimize!(model)
    @assert is_solved_and_feasible(model)
    println("μ             = ", value(μ))
    println("mean(data)    = ", Statistics.mean(data))
    println("σ^2           = ", value(σ)^2)
    println("var(data)     = ", Statistics.var(data))
    println("MLE objective = ", objective_value(model))
    Test.@test value(μ) ≈ Statistics.mean(data) atol = 1e-3
    Test.@test value(σ)^2 ≈ Statistics.var(data) atol = 1e-2
    # You can even do constrained MLE!
    @constraint(model, μ == σ^2)
    optimize!(model)
    @assert is_solved_and_feasible(model)
    Test.@test value(μ) ≈ value(σ)^2
    println()
    println("With constraint μ == σ^2:")
    println("μ                         = ", value(μ))
    println("σ^2                       = ", value(σ)^2)
    println("Constrained MLE objective = ", objective_value(model))
    return
end

example_mle()
μ             = -0.0215521734290741
mean(data)    = -0.021552173429074114
σ^2           = 1.100101397871862
var(data)     = 1.1012026004695599
MLE objective = -1466.6397109231782

With constraint μ == σ^2:
μ                         = 0.6621385003734601
σ^2                       = 0.66213850037346
Constrained MLE objective = -1896.4889420749978

Quadratically constrained programs

A simple quadratically constrained program based on an example from Gurobi.
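
The problem solved below, which print(model) also displays in the output, is

\max \; x \quad \text{s.t.} \quad x + y + z = 1, \quad x^2 + y^2 - z^2 \le 0, \quad x^2 - y z \le 0, \quad y \ge 0, \; z \ge 0.

Because y and z are nonnegative, both quadratic constraints define (rotated) second-order cones, so the problem is convex and the locally optimal point that Ipopt returns is also a global optimum.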

function example_qcp()
    model = Model(Ipopt.Optimizer)
    set_silent(model)
    @variable(model, x)
    @variable(model, y >= 0)
    @variable(model, z >= 0)
    @objective(model, Max, x)
    @constraint(model, x + y + z == 1)
    @constraint(model, x * x + y * y - z * z <= 0)
    @constraint(model, x * x - y * z <= 0)
    optimize!(model)
    Test.@test is_solved_and_feasible(model)
    print(model)
    println("Objective value: ", objective_value(model))
    println("x = ", value(x))
    println("y = ", value(y))
    Test.@test objective_value(model) ≈ 0.32699 atol = 1e-5
    Test.@test value(x) ≈ 0.32699 atol = 1e-5
    Test.@test value(y) ≈ 0.25707 atol = 1e-5
    return
end

example_qcp()
Max x
Subject to
 x + y + z = 1
 x² + y² - z² ≤ 0
 x² - y*z ≤ 0
 y ≥ 0
 z ≥ 0
Objective value: 0.32699283491387243
x = 0.32699283491387243
y = 0.2570658388068964