#! /usr/bin/env python3
#
def hex2_gradient2 ( ):

#*****************************************************************************80
#
## hex2_gradient2 uses gradient descent on the hex2 function.
#
#  Discussion:
#
#    Gradient descent for a function of 2 variables.  The driver chooses
#    a starting point, a learning rate, and stopping tolerances, calls
#    gradient_descent2(), and reports the initial and final iterates.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Modified:
#
#    23 September 2019
#
#  Author:
#
#    John Burkardt
#
  import numpy as np
  from gradient_descent2 import gradient_descent2

  print ( '' )
  print ( 'hex2_gradient2:' )
  print ( '  Seek minimizer of a function z(x,y).' )
#
#  Choose the initial guess.
#  (The original code drew a random point in [-3,3]^2 and then immediately
#  overwrote it with this fixed point; the dead random draw was removed so
#  the run is deterministic and the RNG state is untouched.)
#
  x0 = np.array ( [ 2.0, 1.5 ] )
#
#  Choose a small learning rate.
#
  r = 0.10
#
#  Choose a small tolerance for the stepsize.
#
  dxtol = 0.00001
#
#  Choose a small tolerance for the derivative.
#
  dftol = 0.001
#
#  Maximum number of iterations.
#
  itmax = 100

  x, it = gradient_descent2 ( hex2, hex2_df, x0, r, dxtol, dftol, itmax )

  print ( '' )
  print ( '  ', it, 'gradient descent steps were taken.' )
  print ( '  Initial x = ', x0, ' f(x) = ', hex2 ( x0 ), ' f\'(x) = ', hex2_df ( x0 ) )
  print ( '  Final x = ', x, ' f(x) = ', hex2 ( x ), ' f\'(x) = ', hex2_df ( x ) )
#
#  Terminate.
#
  print ( '' )
  print ( 'hex2_gradient2:' )
  print ( '  Normal end of execution.' )
  return

def hex2 ( x ):

#*****************************************************************************80
#
## hex2 evaluates the function to be minimized.
#
#  Discussion:
#
#    This is the "three-hump camel" function of x[0] and x[1]:
#      z = 2 x^2 - 1.05 x^4 + x^6 / 6 + x y + y^2
#
#  Input:
#
#    x: a sequence of 2 floats, the evaluation point.
#
#  Output:
#
#    value: the scalar function value at x.
#
  value = 2.0 * x[0]**2 - 1.05 * x[0]**4 + x[0]**6 / 6.0 + x[0] * x[1] + x[1]**2

  return value

def hex2_df ( x ):

#*****************************************************************************80
#
## hex2_df evaluates the derivative of the function to be minimized.
#
#  Input:
#
#    x: a sequence of 2 floats, the evaluation point.
#
#  Output:
#
#    df: a numpy array of 2 floats, the gradient of hex2 at x.
#
  import numpy as np

  df = np.array ( [ \
    4.0 * x[0] - 4.2 * x[0]**3 + x[0]**5 + x[1], \
    x[0] + 2.0 * x[1] ] )

  return df

if ( __name__ == '__main__' ):
  hex2_gradient2 ( )