# -*- coding: utf-8 -*-
"""created on wed mar 20 21:56:21 2019
@author: zhangchaoyu
""""""
當離最優值較遠時,利用梯度法,接近極小點時用收斂較快的其他方法
"""import math
import copy
"""1、f(x,y) = x*x + 25*y*y
2、f(x,y) = (x-1)(x-1) + (y-1)(y-1)
"""def gradient(x):
x = x[0]
y = x[1]
#return [2*x, 50*y]
return [2*x-2, 2*y-2]
def hessian():
    # Constant Hessian of the quadratic test function (test function 2).
    #return [[2, 0], [0, 50]]
    return [[2, 0], [0, 2]]
# Optimal (exact line-search) step along direction p: alpha = -(g . p) / (p^T H p);
# with p = g this reduces to the steepest-descent step (g . g) / (g^T H g).
def step_best(g, h, p):
    num1 = multiplication_m_and_m([g], trans([p]))[0][0]
    num2 = multiplication_m_and_m(multiplication_m_and_m([p], h), trans([p]))[0][0]
    return -num1/num2
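
# Why the exact line-search step has this form (assuming a quadratic objective
# f(x) = 0.5*x^T*H*x + b^T*x with gradient g = H*x + b): minimizing
# phi(a) = f(x + a*p) gives phi'(a) = g^T p + a * p^T H p = 0,
# hence a = -(g^T p) / (p^T H p).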
def alpha_conjugate(g, h, p):
    # Conjugacy coefficient: beta = (g^T H p) / (p^T H p), with g the new gradient.
    num1 = multiplication_m_and_m(multiplication_m_and_m([g], h), trans([p]))[0][0]
    num2 = multiplication_m_and_m(multiplication_m_and_m([p], h), trans([p]))[0][0]
    return num1/num2
def multiplication_m_and_v(h, g):
    # Matrix-vector product h @ g (as plain lists).
    p = []
    for i in range(len(h)):
        p.append(sum(h[i][j]*g[j] for j in range(len(g))))
    return p
def multiplication_m_and_m(a, b):
    # Matrix-matrix product a @ b (as plain nested lists).
    c = []
    for i in range(len(a)):
        temp = []
        for j in range(len(b[0])):
            temp.append(sum(a[i][k]*b[k][j] for k in range(len(b))))
        c.append(temp)
    return c
def trans(a):
    # Transpose of a nested-list matrix.
    return [[a[j][i] for j in range(len(a))] for i in range(len(a[0]))]
# Gradient (steepest-descent) method; with the exact line-search step used here,
# the next gradient is orthogonal to the current one.
def gradient_descent(x0):
    x = x0
    g = gradient(x)
    while math.sqrt(g[0]*g[0]+g[1]*g[1]) > 1e-5:
        h = hessian()
        step = step_best(g, h, g)
        x = [x[i]+step*g[i] for i in range(len(x))]
        g = gradient(x)
        print(x)
    return x
# Conjugate gradient method
# e.g. f(x, y) = 1.5*x*x + 0.5*y*y - x*y - 2*x
def gradient_conjugate(x0):
    x = x0
    g = gradient(x)
    p = [-g[i] for i in range(len(g))]
    h = hessian()
    while math.sqrt(g[0]*g[0]+g[1]*g[1]) > 1e-5:
        step = step_best(g, h, p)
        x = [x[i]+step*p[i] for i in range(len(x))]
        g = gradient(x)
        alpha = alpha_conjugate(g, h, p)
        p = [-g[i]+alpha*p[i] for i in range(len(p))]
        print(x)
    return x
# Alternative starting points:
#x0 = [2, 2]
#x0 = [-2, 4]
#x0 = [100, 50000]
x0 = [0, 0]
x = gradient_descent(x0)
x = gradient_conjugate(x0)
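
For a quick sanity check, the minimizer of the quadratic test function can also be obtained in closed form. The sketch below assumes NumPy is available (the script above does not use it) and compares the closed-form solution with the iterate x produced by gradient_conjugate above.

import numpy as np

# For f(x, y) = (x-1)^2 + (y-1)^2 the gradient is H @ x + b with
# H = [[2, 0], [0, 2]] and b = [-2, -2], so the minimizer solves H @ x = -b.
H = np.array([[2.0, 0.0], [0.0, 2.0]])
b = np.array([-2.0, -2.0])
x_star = np.linalg.solve(H, -b)
print(x_star)                             # [1. 1.]
print(np.allclose(x, x_star, atol=1e-4))  # x is the conjugate-gradient result above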
Python implementation of the conjugate gradient method

The conjugate gradient method lies between steepest descent and Newton's method: it uses only first-order derivative information, yet it overcomes the slow convergence of steepest descent while avoiding Newton's method's need to store the Hessian matrix and compute its inverse. It is not only one of the most useful methods for solving large linear systems of equations, but also one of the most effective algorithms for large-scale nonlinear optimization. Here...
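
To make the linear-system use of the method concrete, here is a minimal sketch of the standard conjugate gradient iteration for a symmetric positive-definite system A x = b (NumPy-based; the function name cg_solve, the tolerance, and the iteration cap are illustrative choices). The example system corresponds to the objective f(x, y) = 1.5*x*x + 0.5*y*y - x*y - 2*x mentioned in the comments above, whose minimizer is (1, 1).

import numpy as np

def cg_solve(A, b, x0=None, tol=1e-10, max_iter=1000):
    # Conjugate gradient for A @ x = b, with A symmetric positive definite.
    x = np.zeros_like(b, dtype=float) if x0 is None else np.array(x0, dtype=float)
    r = b - A @ x               # residual = negative gradient of 0.5*x^T A x - b^T x
    p = r.copy()                # first search direction
    rs_old = r @ r
    for _ in range(max_iter):
        Ap = A @ p
        alpha = rs_old / (p @ Ap)          # exact step along p
        x = x + alpha * p
        r = r - alpha * Ap
        rs_new = r @ r
        if np.sqrt(rs_new) < tol:
            break
        p = r + (rs_new / rs_old) * p      # standard direction update
        rs_old = rs_new
    return x

A = np.array([[3.0, -1.0], [-1.0, 1.0]])   # Hessian of f(x, y) = 1.5x^2 + 0.5y^2 - xy - 2x
b = np.array([2.0, 0.0])                   # so grad f = A @ [x, y] - b
print(cg_solve(A, b))                      # expected: [1. 1.]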