Quick Start
Example 1 — Unconstrained minimization (Rosenbrock):

    import numpy as np
    from optimization import minimize_bfgs_fd

    def rosenbrock(x):
        return (1 - x[0])**2 + 100*(x[1] - x[0]**2)**2

    result = minimize_bfgs_fd(rosenbrock, x_guess=np.array([0.0, 0.0]))
    print(result.x)     # -> [1. 1.]
    print(result.fval)  # -> ~0.0
Example 2 — Bounded minimization:

    import numpy as np
    from optimization import minimize_bounds_bfgs_fd

    def f(x):
        return (x[0] - 1)**2 + (x[1] - 2)**2

    result = minimize_bounds_bfgs_fd(
        f, ibtype=0,
        xlb=np.array([0.0, 0.0]),
        xub=np.array([3.0, 3.0]),
        x_guess=np.array([2.5, 0.5]),
    )
    print(result.x)  # -> [1. 2.]
Example 3 — Linear programming:

    import numpy as np
    from optimization import linear_program

    c = np.array([-1.0, -2.0])
    A = np.array([[1.0, 1.0], [1.0, -1.0]])
    bl = np.array([-np.inf, -np.inf])
    bu = np.array([1.5, 0.5])
    result = linear_program(
        c=c, A=A, bl=bl, bu=bu,
        irtype=np.array([1, 1]),
        ibtype=1,
        xlb=np.zeros(2), xub=np.ones(2)*np.inf,
        n=2,
    )
    print(result.x)     # -> [0.5, 1.0]
    print(result.fval)  # -> -2.5
Example 4 — Nonlinear least squares:

    import numpy as np
    from optimization import nonlinear_least_squares_fd

    t = np.array([0.0, 0.5, 1.0, 1.5, 2.0])
    y_data = 2.0 * np.exp(0.5 * t)

    def residuals(x):
        return x[0] * np.exp(x[1] * t) - y_data

    result = nonlinear_least_squares_fd(
        residuals, m=5, x_guess=np.array([1.0, 0.0])
    )
    print(result.x)  # -> [2. 0.5]