8. Optimization: Exercise Solutions#

1. Try with your own function#

  1. Define a function of your interest (with two or more inputs) to be minimized or maximized.
    It can be an explicit mathematical form, or given implicitly as the result of a simulation.

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
def fun(x, a=10):
    """Sine in a quadratic valley"""
    x = np.array(x)
    y = (x[0]**2 + x[1]**2) + a*np.sin(x[0])
    return y

def fun_grad(x, a=10):
    """Gradient of fun(x)"""
    df1 = 2*x[0] + a*np.cos(x[0])  # d/dx1 of x1^2 + a*sin(x1)
    df2 = 2*x[1]
    return np.array([df1, df2])
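Before using the analytic gradient, it is worth sanity-checking it against a numerical one. Below is a minimal sketch; the helper name num_grad and the test point are my own choices, not part of the original solution.

def num_grad(f, x, h=1e-6):
    """Central-difference approximation of the gradient of f at x."""
    x = np.array(x, dtype=float)
    g = np.zeros_like(x)
    for i in range(len(x)):
        e = np.zeros_like(x)
        e[i] = h
        g[i] = (f(x + e) - f(x - e)) / (2*h)
    return g

# The two should agree to about 1e-6 at an arbitrary test point
print(fun_grad([1.0, 2.0]))
print(num_grad(fun, [1.0, 2.0]))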
  2. Visualize the function, e.g., by a surface plot or a contour plot.

w = 5
N = 10
x = np.linspace(-w, w, N)
x1, x2 = np.meshgrid(x, x)  # grid over [-w, w] x [-w, w]
y = fun((x1, x2), 10)
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(projection='3d')
ax.plot_surface(x1, x2, y, cmap='viridis')
plt.xlabel("x1"); plt.ylabel("x2");
[Figure: 3D surface plot of the function over (x1, x2)]
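A contour plot on a finer grid makes the two valleys along x1 easier to see. This is an extra sketch; the grid variables xf, X1, X2, Yf are my own names and do not overwrite the coarse grid used below.

xf = np.linspace(-w, w, 100)   # finer grid than above
X1, X2 = np.meshgrid(xf, xf)
Yf = fun((X1, X2), 10)
plt.contour(X1, X2, Yf, levels=20)
plt.colorbar()
plt.xlabel("x1"); plt.ylabel("x2");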
  3. Maximize or minimize the function using two or more optimization algorithms, e.g.

  • Gradient ascent/descent

  • Newton-Raphson method (a sketch follows the gradient descent results below)

  • Evolutionary algorithm (see the differential evolution sketch after the scipy.optimize results)

  • scipy.optimize

and compare the results with different starting points and parameters.

Gradient descent#

def grad_descent(f, df, x0, eta=0.01, eps=1e-6, imax=1000):
    """Gradient descent"""
    xh = np.zeros((imax+1, len(np.ravel([x0]))))  # history of visited points
    xh[0] = x0
    f0 = f(x0)  # initialization
    for i in range(imax):
        x1 = x0 - eta*df(x0)  # step against the gradient
        f1 = f(x1)
        xh[i+1] = x1
        if f1 <= f0 and f1 > f0 - eps:  # converged: decrease smaller than eps
            return x1, f1, xh[:i+2]
        x0 = x1
        f0 = f1
    print("Failed to converge in", imax, "iterations.")
    return x1, f1, xh
x0 = [1,2]
xmin, fmin, xhist = grad_descent(fun, fun_grad, x0, 0.01)
print(xmin, fmin)
plt.contour(x1, x2, y)
plt.plot(xhist[:,0], xhist[:,1], '.-');
[-1.30644001  0.00485736] -7.945799781640874
[Figure: contour plot with the gradient descent path from [1, 2] to the global minimum near (-1.3, 0)]
x0 = [2,2]
xmin, fmin, xhist = grad_descent(fun, fun_grad, x0, 0.01)
print(xmin, fmin)
plt.contour(x1, x2, y)
plt.plot(xhist[:,0], xhist[:,1], '.-');
[3.83746711 0.00485736] 8.31560917345187
[Figure: contour plot with the gradient descent path from [2, 2] to the local minimum near (3.8, 0)]
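The Newton-Raphson method was also listed above; here is a minimal sketch. It needs the Hessian, which for this fun is diag(2 - a*sin(x1), 2); the function names fun_hess and newton_raphson are my own additions, derived by hand rather than taken from the original solution. Note that Newton-Raphson converges to any stationary point (df(x) = 0), so where the Hessian is not positive definite it can land on a maximum or a saddle rather than a minimum.

def fun_hess(x, a=10):
    """Hessian of fun(x): diagonal, since the cross-derivatives vanish."""
    return np.array([[2 - a*np.sin(x[0]), 0],
                     [0, 2]])

def newton_raphson(df, ddf, x0, eps=1e-6, imax=100):
    """Newton-Raphson iteration for a stationary point of f (df(x) = 0)."""
    x0 = np.array(x0, dtype=float)
    for i in range(imax):
        step = np.linalg.solve(ddf(x0), df(x0))  # solve H dx = grad
        x1 = x0 - step
        if np.max(np.abs(x1 - x0)) < eps:
            return x1, i+1
        x0 = x1
    print("Failed to converge in", imax, "iterations.")
    return x0, imax

x, iters = newton_raphson(fun_grad, fun_hess, [1, 2])
print(x, fun(x), iters)

Comparing its endpoint against the gradient descent runs above, from different starting points, is the point of the exercise: depending on the start it may reach a minimum, the local maximum between the two valleys, or a saddle.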

scipy.optimize#

from scipy.optimize import minimize
x0 = [1,2]
result = minimize(fun, x0, jac=fun_grad, options={'disp': True})
print( result.x, result.fun)
plt.contour(x1, x2, y)
plt.plot(x0[0], x0[1], 'x', result.x[0], result.x[1], 'o');
Optimization terminated successfully.
         Current function value: -7.945823
         Iterations: 6
         Function evaluations: 10
         Gradient evaluations: 10
[-1.30644001e+00  9.72344832e-09] -7.945823375615283
[Figure: contour plot with the start [1, 2] (x) and the scipy.optimize solution near (-1.3, 0) (o)]
x0 = [2,2]
result = minimize(fun, x0, jac=fun_grad, options={'disp': True})
print( result.x, result.fun)
plt.contour(x1, x2, y)
plt.plot(x0[0], x0[1], 'x', result.x[0], result.x[1], 'o');
Optimization terminated successfully.
         Current function value: 8.315586
         Iterations: 9
         Function evaluations: 13
         Gradient evaluations: 13
[ 3.83746711e+00 -9.70435929e-09] 8.315585579477458
[Figure: contour plot with the start [2, 2] (x) and the scipy.optimize solution near (3.8, 0) (o)]
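For the evolutionary algorithm listed above, scipy already ships one: differential_evolution performs a population-based global search within box bounds, so no starting point is needed. A minimal sketch; the bounds (the plotted region) and the seed are my choices.

from scipy.optimize import differential_evolution
# Global search within the plotted region; no starting point needed
result = differential_evolution(fun, bounds=[(-w, w), (-w, w)], seed=0)
print(result.x, result.fun)

Unlike the gradient-based runs above, this should typically report the global minimum near (-1.3, 0) regardless of the seed, at the cost of many more function evaluations.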

Option) Set equality or inequality constraints and apply an algorithm for constrained optimization.

# h(x) = 2*x[0] - x[1] - 4 = 0
def h(x):
    return 2*x[0] - x[1] - 4
def h_grad(x):
    return np.array([2, -1])

With equality constraint \(h(x)=0\).

x0 = [1,-3]
cons = ({'type':'eq', 'fun':h, 'jac':h_grad})
result = minimize(fun, x0, jac=fun_grad,
            method='SLSQP', constraints=cons, options={'disp': True})
print(result.x, result.fun)
plt.contour(x1, x2, y)
plt.plot([0,4], [-4,4])  # the line h(x) = 0
plt.plot(x0[0], x0[1], 'x', result.x[0], result.x[1], 'o');
Optimization terminated successfully    (Exit mode 0)
            Current function value: 13.081270258785018
            Iterations: 6
            Function evaluations: 7
            Gradient evaluations: 6
[2.13406394 0.26812787] 13.081270258785018
[Figure: contour plot with the line h(x) = 0, the start (x), and the constrained solution near (2.13, 0.27) (o)]
x0 = [1,-3]
cons = ({'type':'ineq', 'fun':h, 'jac':h_grad})
result = minimize(fun, x0, jac=fun_grad,
            method='SLSQP', constraints=cons, options={'disp': True})
print(result.x, result.fun)
plt.contour(x1, x2, y)
plt.plot([0,4], [-4,4])  # the boundary h(x) = 0
plt.plot(x0[0], x0[1], 'x', result.x[0], result.x[1], 'o');
Optimization terminated successfully    (Exit mode 0)
            Current function value: 8.31558557983486
            Iterations: 13
            Function evaluations: 21
            Gradient evaluations: 13
[3.83746046e+00 1.30908488e-05] 8.31558557983486
[Figure: contour plot with the boundary h(x) = 0, the start (x), and the solution near (3.8, 0) inside the feasible region (o)]
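As a check on the equality-constrained result, the Lagrange condition \(\nabla f(x^*) = \lambda \nabla h(x^*)\) should hold at the optimum. The snippet below is my own addition: it estimates \(\lambda\) from each component of the gradients, and the two estimates should agree.

# Re-run the equality-constrained problem and check grad f = lambda * grad h
cons = ({'type':'eq', 'fun':h, 'jac':h_grad})
result = minimize(fun, [1,-3], jac=fun_grad, method='SLSQP', constraints=cons)
g = fun_grad(result.x)
lam = g / h_grad(result.x)   # elementwise; both components should match
print("lambda estimates:", lam)
print("constraint value h(x*):", h(result.x))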