Using gradient descent in Python to find an extremum

from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
import numpy as np
import matplotlib.pyplot as plt

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')  # Axes3D(fig) alone no longer attaches the axes in recent matplotlib
X = np.arange(-4, 4, 0.25)
Y = np.arange(-4, 4, 0.25)
X, Y = np.meshgrid(X, Y)

# quadratic surface z = x^2 + 2x + 1 + y^2
Z = X*X + 2*X + 1 + Y*Y
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
Details of any plotting method can be checked with help(function), e.g. help(ax.plot_surface).

ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='rainbow')
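As a side note (not in the original post), the plotted surface z = x^2 + 2x + 1 + y^2 = (x + 1)^2 + y^2 has its minimum at (-1, 0); a minimal sketch marking that point on the same axes before the figure is shown:

# hypothetical addition: mark the analytic minimum (-1, 0, 0) of the plotted surface
ax.scatter([-1], [0], [0], color='black', s=60)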
plt.show()

The objective for gradient descent is f(x, y) = (x - 3)^2 + (y - 1)^2 = x^2 - 6x + 9 + y^2 - 2y + 1, whose minimum lies at (3, 1).
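Its partial derivatives are ∂f/∂x = 2(x - 3) = 2x - 6 and ∂f/∂y = 2(y - 1) = 2y - 2; setting both to zero recovers the minimum (3, 1). Gradient descent therefore iterates x ← x - step·(2x - 6) and y ← y - step·(2y - 2), which is exactly what g_funx, g_funy and the loop below implement.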

def f(x, y):
    return x * x - 6 * x + 9 + y * y - 2 * y + 1   # (x-3)^2 + (y-1)^2

def g_funx(x):
    return 2 * x - 6   # df/dx

def g_funy(y):
    return 2 * y - 2   # df/dy
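As a quick sanity check of these hand-coded derivatives (a minimal sketch, not part of the original post; the step h and the test point (4, 2) are arbitrary choices), central differences on f should reproduce g_funx and g_funy:

# numerical gradient check via central differences
h = 1e-6
x0, y0 = 4.0, 2.0
num_gx = (f(x0 + h, y0) - f(x0 - h, y0)) / (2 * h)
num_gy = (f(x0, y0 + h) - f(x0, y0 - h)) / (2 * h)
print(num_gx, g_funx(x0))   # both close to 2.0
print(num_gy, g_funy(y0))   # both close to 2.0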

tx = []      # history of x iterates
ty = []      # history of y iterates
g_gdx = []   # history of the gradient in x
g_gdy = []   # history of the gradient in y

def gd(x_start, y_start, step, g_funx, g_funy):
    # gradient descent: repeatedly step each variable against its gradient
    tx.clear()
    ty.clear()
    g_gdx.clear()
    g_gdy.clear()
    x = x_start
    y = y_start
    print('learn rate', step, 'init x', x_start, 'init y', y_start)
    for i in range(10000):
        gradx = g_funx(x)
        x -= gradx * step
        grady = g_funy(y)
        y -= grady * step

        g_gdx.append(gradx)
        g_gdy.append(grady)
        tx.append(x)
        ty.append(y)
        # print('[ Epoch = ]', i, 'gradx =', gradx, 'grady =', grady, 'x =', x, 'y =', y)
        if abs(gradx) < 1e-3 and abs(grady) < 1e-3:   # stop only once both gradients are small
            break
    print('[ Epoch = ]', i, 'gradx =', gradx, 'grady =', grady, 'x =', x, 'y =', y)
    return x, y

def show_grad_result(initx, inity, learn_rate):
    gd(initx, inity, learn_rate, g_funx, g_funy)
    plt.plot(tx)       # x per epoch
    plt.plot(ty)       # y per epoch
    plt.xlabel('epoch')
    plt.ylabel('result')
    plt.show()
    plt.plot(g_gdx)    # gradient in x per epoch
    plt.plot(g_gdy)    # gradient in y per epoch
    plt.xlabel('epoch')
    plt.ylabel('grad')
    plt.show()

show_grad_result(4,2,0.01)
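With a learning rate of 0.01 and starting point (4, 2), x and y should settle near the minimum at (3, 1) within a few hundred epochs. To see how the step size affects convergence, the same helper can be called with other settings (hypothetical values, not from the original post):

show_grad_result(4, 2, 0.1)     # larger step: converges in a few dozen epochs
show_grad_result(4, 2, 0.001)   # smaller step: needs a few thousand epochs
show_grad_result(4, 2, 1.5)     # too large: |1 - 2*step| > 1, so the iterates diverge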


Reposted from blog.csdn.net/wdjjwb/article/details/79324966