Unconstrained Multivariate Examples

Example Code

UMINF — BFGS with finite-difference gradient:

"""IMSL UMINF example: minimize Rosenbrock function using BFGS with FD gradient.

Reproduces the IMSL UMINF published example:
- Function: f(x) = 100*(x2 - x1^2)^2 + (1 - x1)^2  (Rosenbrock)
- Starting point: (-1.2, 1.0)
- Expected minimum at x = (1.0, 1.0), f = 0.0

Outputs:
- Table printed to stdout
- SVG plot saved to test_output/demo_imsl_uminf.svg
"""

from __future__ import annotations

from pathlib import Path
from typing import Dict

import matplotlib.pyplot as plt
import numpy as np

from optimization import minimize_bfgs_fd


def run_demo_imsl_uminf() -> Dict[str, object]:
    """Run IMSL UMINF example: minimize Rosenbrock with BFGS/FD gradient.

    Minimizes f(x) = 100*(x2 - x1^2)^2 + (1 - x1)^2 from the IMSL
    starting point (-1.2, 1.0), prints a summary table, and saves a
    contour plot of the search region as an SVG.

    Args:
        None

    Returns:
        Dict[str, object]: Result dict with keys ``x``, ``fval``,
            ``n_iter``, ``n_fev``, and ``plot_path``.
    """
    def objective(point: np.ndarray) -> float:
        # Rosenbrock "banana" function; global minimum f = 0 at (1, 1).
        return 100.0 * (point[1] - point[0] ** 2) ** 2 + (1.0 - point[0]) ** 2

    start = np.array([-1.2, 1.0])
    res = minimize_bfgs_fd(objective, x_guess=start, max_iter=500, max_fev=2000)

    # Summary table on stdout; values are pre-formatted then right-aligned
    # so the layout matches the published example output.
    rule = "-" * 55
    print("\nIMSL UMINF Example: Rosenbrock Minimization (BFGS / FD gradient)")
    print(rule)
    print(f"{'Parameter':<25} {'Value':>20}")
    print(rule)
    for label, value in (
        ("x1 (solution)", f"{res.x[0]:.6f}"),
        ("x2 (solution)", f"{res.x[1]:.6f}"),
        ("f(x)", f"{res.fval:.8f}"),
        ("Iterations", str(res.n_iter)),
        ("Function evals", str(res.n_fev)),
        ("Converged", str(res.success)),
    ):
        print(f"{label:<25} {value:>20}")
    print(rule)

    out_dir = Path("test_output")
    out_dir.mkdir(parents=True, exist_ok=True)
    svg_path = out_dir / "demo_imsl_uminf.svg"

    # Evaluate the objective on a grid covering start and minimum;
    # log1p compresses the huge dynamic range for plotting.
    axis_a = np.linspace(-2.0, 2.0, 300)
    axis_b = np.linspace(-1.0, 3.0, 300)
    grid_a, grid_b = np.meshgrid(axis_a, axis_b)
    surface = 100.0 * (grid_b - grid_a ** 2) ** 2 + (1.0 - grid_a) ** 2

    fig, axes = plt.subplots(figsize=(8, 6))
    contours = axes.contourf(grid_a, grid_b, np.log1p(surface), levels=30, cmap="viridis")
    fig.colorbar(contours, ax=axes, label="log(1+f)")
    axes.scatter(*start, color="white", s=80, marker="^", zorder=5, label="Start (-1.2, 1.0)")
    axes.scatter(*res.x, color="red", s=80, marker="*", zorder=5, label=f"Min ({res.x[0]:.4f},{res.x[1]:.4f})")
    axes.set_xlabel("x1")
    axes.set_ylabel("x2")
    axes.set_title("IMSL UMINF: Rosenbrock Minimization (BFGS/FD)")
    axes.legend()
    fig.tight_layout()
    fig.savefig(svg_path, format="svg")
    plt.close(fig)

    return {
        "x": res.x,
        "fval": res.fval,
        "n_iter": res.n_iter,
        "n_fev": res.n_fev,
        "plot_path": str(svg_path),
    }


if __name__ == "__main__":
    # Allow running this example file directly as a script.
    run_demo_imsl_uminf()

UMING — BFGS with analytic gradient:

"""IMSL UMING example: minimize Rosenbrock function using BFGS with analytic gradient.

Reproduces the IMSL UMING published example:
- Function: f(x) = 100*(x2-x1^2)^2 + (1-x1)^2 (Rosenbrock)
- Gradient: grad_f = [-400*(x2-x1^2)*x1 - 2*(1-x1), 200*(x2-x1^2)]
- Starting point: (-1.2, 1.0)
- Expected minimum at x = (1.0, 1.0), f = 0.0

Outputs:
- Table printed to stdout
- SVG plot saved to test_output/demo_imsl_uming.svg
"""

from __future__ import annotations

from pathlib import Path
from typing import Dict

import matplotlib.pyplot as plt
import numpy as np

from optimization import minimize_bfgs


def run_demo_imsl_uming() -> Dict[str, object]:
    """Run IMSL UMING example: Rosenbrock with BFGS and analytic gradient.

    Minimizes f(x) = 100*(x2 - x1^2)^2 + (1 - x1)^2 using the exact
    gradient, starting from the IMSL point (-1.2, 1.0); prints a summary
    table and saves an SVG contour plot.

    Args:
        None

    Returns:
        Dict[str, object]: Result dict with keys ``x``, ``fval``,
            ``n_iter``, ``n_fev``, ``n_gev``, and ``plot_path``.
    """
    def objective(point: np.ndarray) -> float:
        # Rosenbrock function; global minimum f = 0 at (1, 1).
        return 100.0 * (point[1] - point[0] ** 2) ** 2 + (1.0 - point[0]) ** 2

    def gradient(point: np.ndarray) -> np.ndarray:
        # Closed-form partial derivatives; the valley term is shared.
        valley = point[1] - point[0] ** 2
        return np.array([
            -400.0 * valley * point[0] - 2.0 * (1.0 - point[0]),
            200.0 * valley,
        ])

    start = np.array([-1.2, 1.0])
    res = minimize_bfgs(objective, gradient, x_guess=start, max_iter=500, max_fev=2000)

    # Summary table; values pre-formatted then right-aligned to keep the
    # layout identical to the published example output.
    rule = "-" * 60
    print("\nIMSL UMING Example: Rosenbrock Minimization (BFGS / analytic gradient)")
    print(rule)
    print(f"{'Parameter':<25} {'Value':>25}")
    print(rule)
    for label, value in (
        ("x1 (solution)", f"{res.x[0]:.6f}"),
        ("x2 (solution)", f"{res.x[1]:.6f}"),
        ("f(x)", f"{res.fval:.8f}"),
        ("Iterations", str(res.n_iter)),
        ("Function evals", str(res.n_fev)),
        ("Gradient evals", str(res.n_gev)),
        ("Converged", str(res.success)),
    ):
        print(f"{label:<25} {value:>25}")
    print(rule)

    out_dir = Path("test_output")
    out_dir.mkdir(parents=True, exist_ok=True)
    svg_path = out_dir / "demo_imsl_uming.svg"

    # Contour data over a window containing both start and minimum;
    # log1p tames the steep valley walls for display.
    axis_a = np.linspace(-2.0, 2.0, 300)
    axis_b = np.linspace(-1.0, 3.0, 300)
    grid_a, grid_b = np.meshgrid(axis_a, axis_b)
    surface = 100.0 * (grid_b - grid_a ** 2) ** 2 + (1.0 - grid_a) ** 2

    fig, axes = plt.subplots(figsize=(8, 6))
    contours = axes.contourf(grid_a, grid_b, np.log1p(surface), levels=30, cmap="plasma")
    fig.colorbar(contours, ax=axes, label="log(1+f)")
    axes.scatter(*start, color="white", s=80, marker="^", zorder=5, label="Start")
    axes.scatter(*res.x, color="red", s=80, marker="*", zorder=5, label="Minimum")
    axes.set_xlabel("x1")
    axes.set_ylabel("x2")
    axes.set_title("IMSL UMING: Rosenbrock Minimization (BFGS / Analytic Grad)")
    axes.legend()
    fig.tight_layout()
    fig.savefig(svg_path, format="svg")
    plt.close(fig)

    return {
        "x": res.x,
        "fval": res.fval,
        "n_iter": res.n_iter,
        "n_fev": res.n_fev,
        "n_gev": res.n_gev,
        "plot_path": str(svg_path),
    }


if __name__ == "__main__":
    # Allow running this example file directly as a script.
    run_demo_imsl_uming()

Console Input

Run the unconstrained minimization scripts from the package root:

python examples/example_imsl_uminf.py
python examples/example_imsl_uming.py

Plot Output

Generated SVG plots:

UMINF: Rosenbrock minimization with BFGS/FD
UMING: Rosenbrock minimization with analytic gradient

Console Output

UMINF console output (the precision-loss warning below is expected with finite-difference gradients: near the minimum the FD gradient cannot reach the requested tolerance, so the solver reports `Converged False` even though the solution matches (1, 1) to roughly 1e-5):

Optimization did not converge: Desired error not necessarily achieved due to precision loss.

IMSL UMINF Example: Rosenbrock Minimization (BFGS / FD gradient)
-------------------------------------------------------
Parameter                                Value
-------------------------------------------------------
x1 (solution)                         0.999996
x2 (solution)                         0.999991
f(x)                                0.00000000
Iterations                                  33
Function evals                             237
Converged                                False
-------------------------------------------------------

UMING console output:

IMSL UMING Example: Rosenbrock Minimization (BFGS / analytic gradient)
------------------------------------------------------------
Parameter                                     Value
------------------------------------------------------------
x1 (solution)                              1.000000
x2 (solution)                              1.000000
f(x)                                     0.00000000
Iterations                                       34
Function evals                                   41
Gradient evals                                   41
Converged                                      True
------------------------------------------------------------