import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt

def func(x):
    return 5*x[0]**2 + x[1]**2

x = np.linspace(-3, 3, 50)
y = np.linspace(-3, 3, 50)
X, Y = np.meshgrid(x, y)  # meshgrid returns 2D x and y coordinate grids given 1D arrays of x and y
XY = np.vstack([X.ravel(), Y.ravel()])  # vertically stack the flattened (2D -> 1D row) x and y coordinates
Z = func(XY).reshape(50, 50)  # reshape function values back to 2D for the contour plot

plt.contour(X, Y, Z, np.linspace(0, 25, 50))  # contour plot with levels np.linspace(0, 25, 50)
plt.gca().set_aspect('equal', adjustable='box')  # axis equal
# mark the minimum at (0, 0) with a red 'x', centered vertically/horizontally
plt.text(0, 0, 'x', va='center', ha='center', color='red', fontsize=20)
plt.show()

def func_grad(x):
    """gradient of 5x^2 + y^2"""
    grad = np.zeros_like(x)
    grad[0] = 10*x[0]
    grad[1] = 2*x[1]
    return grad

def func_hess(x):
    """Hessian of 5x^2 + y^2; this is not used in the current exercise though."""
    n = np.size(x)  # we assume this is an n x 1 or 1 x n vector
    hess = np.zeros((n, n), 'd')
    hess[0, 0] = 10.
    hess[1, 0] = 0.
    hess[0, 1] = 0.
    hess[1, 1] = 2.
    return hess

def reporter(x):
    """capture intermediate steps of the optimization"""
    global xs
    xs.append(x)

x0 = np.array([2., 2.])
xs = [x0]
result = opt.minimize(func, x0, jac=func_grad, hess=None, callback=reporter)
xs = np.array(xs)

plt.figure()
# contour plot with levels np.linspace(0, 25, 50)
plt.contour(X, Y, Z, np.linspace(0, 25, 50))
plt.gca().set_aspect('equal', adjustable='box')  # axis equal
# mark the minimum at (0, 0) with a red 'x', centered vertically/horizontally
plt.text(0, 0, 'x', va='center', ha='center', color='red', fontsize=20)
# plot the optimization path
plt.plot(xs[:, 0], xs[:, 1], '-o')
plt.show()
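
# As noted in its docstring, func_hess is not used by the default (BFGS) run
# above, since BFGS approximates curvature from gradients alone. Below is a
# minimal sketch of how it could be used, assuming SciPy's Newton-CG method,
# which accepts an explicit Hessian via the `hess` argument. The variable
# `result_newton` is introduced here for illustration; the trajectory will
# generally differ from the BFGS path plotted above (Newton steps account
# for the 10-vs-2 curvature mismatch between the two axes directly).
xs = [x0]  # reset the global path buffer for the reporter callback
result_newton = opt.minimize(func, x0, jac=func_grad, hess=func_hess,
                             method='Newton-CG', callback=reporter)
xs = np.array(xs)
print(result_newton.x)  # should converge to (0, 0)

plt.figure()
plt.contour(X, Y, Z, np.linspace(0, 25, 50))
plt.gca().set_aspect('equal', adjustable='box')
plt.plot(xs[:, 0], xs[:, 1], '-o')  # Newton-CG optimization path
plt.show()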