I have written this code, which performs gradient descent for a function.
I'm trying to simplify this code and show the "final solution" for 3 different initial values. If you look at my code, you'll see at the bottom there are the variables u and v. What I'm trying to accomplish is to get my final output to look like this: Final solution #1 = ... Final solution #2 = ... Final solution #3 = ...
Any ideas or solutions for this would be great. I'm running into errors with every variation I attempt.
import numpy as np
def g(x):
    """Objective function g(x) = (x0 - 2)^4 + (x0 - 2*x1)^2.

    NOTE(review): the pasted source lost its minus signs
    (`(x[0]2)**4` etc.); this is the standard reconstruction,
    minimized at any point with x0 = 2 and x1 = 1.

    Parameters
    ----------
    x : array-like of length 2
        Point (x0, x1) at which to evaluate the objective.

    Returns
    -------
    float
        The objective value; always >= 0.
    """
    return (x[0] - 2) ** 4 + (x[0] - 2 * x[1]) ** 2
def g1(x):
    """Analytic gradient of g(x) = (x0 - 2)^4 + (x0 - 2*x1)^2.

    Fixes two defects in the original:
    * the pasted source lost its minus signs;
    * d/dx0 was missing the 2*(x0 - 2*x1) cross term, and d/dx1 was
      missing its negative sign (chain rule on the -2*x1 factor).

    Parameters
    ----------
    x : array-like of length 2
        Point (x0, x1) at which to evaluate the gradient.

    Returns
    -------
    numpy.ndarray of shape (2,)
        [dg/dx0, dg/dx1] evaluated at x.
    """
    grad = np.array([0.0, 0.0])
    # dg/dx0 = 4*(x0-2)^3  +  2*(x0 - 2*x1)
    grad[0] = 4 * (x[0] - 2) ** 3 + 2 * (x[0] - 2 * x[1])
    # dg/dx1 = 2*(x0 - 2*x1) * (-2) = -4*(x0 - 2*x1)
    grad[1] = -4 * (x[0] - 2 * x[1])
    return grad
def back_track_1s(x):
    """Backtracking (Armijo) line search for a step along -g1(x).

    Shrinks eta by a factor beta until the sufficient-decrease
    condition  g(x - eta*grad) <= g(x) - gamma*eta*||grad||^2  holds.

    Fixes in this version:
    * `gamm` typo (NameError) -> `gamma`;
    * minus signs lost in the paste restored;
    * loop-invariant g(x) and g1(x) hoisted out of the loop;
    * a floor on eta so a flat/zero gradient cannot loop forever.

    Parameters
    ----------
    x : array-like of length 2
        Current iterate.

    Returns
    -------
    float
        Step size eta in (0, 1].
    """
    eta = 1.0
    beta = 0.1   # shrink factor per rejection
    gamma = 0.1  # sufficient-decrease parameter
    grad = g1(x)              # hoisted: does not change inside the loop
    fx = g(x)
    decrease = gamma * np.dot(grad, grad)
    while g(x - eta * grad) > fx - eta * decrease:
        eta = beta * eta
        if eta < 1e-12:  # safeguard: give up rather than loop forever
            break
    return eta
def grad_desc_2D(f, f1, xinit, step_size, tol, max_iter):
    """Fixed-step gradient descent in 2-D.

    Iterates x <- x - step_size * f1(x) until the step length drops
    below `tol` or `max_iter` iterations have run.

    Fixes in this version:
    * body read `x_init` while the parameter is `xinit` (NameError);
    * minus signs lost in the paste restored (norm test and update);
    * dead pre-initializations of x_new/gradient/move removed;
    * x_old is seeded away from x_new so the loop always takes at
      least one step (the original skipped entirely when the start
      point happened to be the zero vector);
    * input is copied to float so the caller's array is not mutated
      and integer starts do not truncate.

    Parameters
    ----------
    f : callable
        Objective function (kept for interface compatibility; not
        used by the fixed-step iteration itself).
    f1 : callable
        Gradient of f; maps a length-2 array to a length-2 array.
    xinit : array-like of length 2
        Starting point.
    step_size : float
        Fixed step size.
    tol : float
        Stop when ||x_new - x_old|| <= tol.
    max_iter : int
        Iteration cap.

    Returns
    -------
    (numpy.ndarray, int)
        Final iterate and the number of iterations performed.
    """
    x_new = np.asarray(xinit, dtype=float).copy()
    # Seed x_old so the first convergence test always passes.
    x_old = x_new + 2.0 * tol + 1.0
    iter_ctr = 0
    while np.linalg.norm(x_new - x_old) > tol and iter_ctr < max_iter:
        x_old = x_new
        x_new = x_old - step_size * f1(x_old)
        iter_ctr += 1
    return x_new, iter_ctr
# Driver: run gradient descent from three different initial points and
# print one "Final solution #i" report per start.
#
# Fixes in this version:
# * `gl` typo (NameError) -> `g1`;
# * the original built x_init = np.array([u, v]) — a single 2x3 integer
#   array — instead of three separate 2-D starting points; the i-th
#   start is (u[i], v[i]);
# * prints in the requested "Final solution #1 = ..." format;
# * "evalutation" typo in the output fixed.
u = np.array([4.0, 3.0, 2.0])
v = np.array([5.0, 6.0, 7.0])
step_size = 0.0025
tolerance = 0.0001
max_iter = 100

for i, (ui, vi) in enumerate(zip(u, v), start=1):
    x_init = np.array([ui, vi])
    x_f, iter_ctr = grad_desc_2D(g, g1, x_init, step_size, tolerance, max_iter)
    print(f"Final solution #{i} = {x_f}")
    print(f"  Functional evaluation = {g(x_f)}")
    print(f"  Iteration count = {iter_ctr}")