Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import math
- import pandas as pd
- from scipy import optimize
eps = 0.000001  # convergence tolerance for the gradient norm / step size
h = 0.0001      # finite-difference step used by the numerical derivatives below
def f(ar):
    """Objective function f(x, y) = x^2 + 8*y^2 + 0.001*x*y - x - y.

    ar: sequence whose first two components are x and y. The original
    unpacked a third component z but never used it, which needlessly
    required len(ar) >= 3; the unused local is removed.
    """
    x = ar[0]
    y = ar[1]
    return x**2 + 8*y**2 + 0.001*x*y - x - y
def hiper():
    """Return a (normal_vector, offset) pair.

    NOTE(review): presumably describes the hyperplane
    2x + y + 3z = 1 — confirm against the caller.
    """
    normal = np.array([2, 1, 3])
    offset = 1
    return normal, offset
def dFx(ar):
    """Central-difference approximation of df/dx at point ar, step h.

    Bug fix: the original called f(x+h, y, z) with three positional
    arguments, but f takes a single array argument — that raised
    TypeError on every call. The perturbed point is now passed as one
    array.
    """
    x = ar[0]
    y = ar[1]
    z = ar[2]
    return (f(np.array([x + h, y, z])) - f(np.array([x - h, y, z]))) / (2 * h)
def dFy(ar):
    """Central-difference approximation of df/dy at point ar, step h.

    Bug fix: the original called f(x, y+h, z) with three positional
    arguments, but f takes a single array argument — that raised
    TypeError on every call. The perturbed point is now passed as one
    array.
    """
    x = ar[0]
    y = ar[1]
    z = ar[2]
    return (f(np.array([x, y + h, z])) - f(np.array([x, y - h, z]))) / (2 * h)
def dFz(ar):
    """Central-difference approximation of df/dz at point ar, step h.

    Bug fix: the original called f(x, y, z+h) with three positional
    arguments, but f takes a single array argument — that raised
    TypeError on every call. The perturbed point is now passed as one
    array. (With the current f, which ignores z, this is always ~0.)
    """
    x = ar[0]
    y = ar[1]
    z = ar[2]
    return (f(np.array([x, y, z + h])) - f(np.array([x, y, z - h]))) / (2 * h)
def gradF(ar):
    """Numerical gradient of f at ar: float array [df/dx, df/dy, df/dz]."""
    partials = [dFx(ar), dFy(ar), dFz(ar)]
    return np.asarray(partials, dtype=float)
def gradSopr(x, y, z, n=3, max_iter=1000):
    """Minimize f by the Fletcher-Reeves conjugate-gradient method.

    Starts from the point (x, y, z); restarts with the steepest-descent
    direction every n conjugate steps. Returns the approximate minimizer
    as a float ndarray.

    Fixes over the original:
      * ``j++`` / ``k++`` are not Python (SyntaxError) -> ``j += 1``.
      * ``n`` was undefined -> now a parameter with a default.
      * The line search used ``optimize.minimize`` with a 3-vector start
        for the scalar step ``l``; the original comment asked for
        golden-section, so a scalar golden-section search is used.
      * The conjugate direction was ``+gradF(x_{k+1}) + omega*s_k``;
        Fletcher-Reeves requires ``-gradF(x_{k+1}) + omega*s_k``.
      * In the ``j < n`` branch neither the point nor the direction was
        ever updated, so the loop never progressed; both are updated now.
      * The function returned nothing; it now returns the final point.
      * ``max_iter`` bounds the loop so it cannot spin forever.
    """
    # Step 1: current point and steepest-descent start direction.
    xk = np.array([x, y, z], float)
    sk = -gradF(xk)
    j = 0  # conjugate steps taken since the last restart

    for _ in range(max_iter):
        # Step 2: scalar golden-section line search along sk.
        lmbd = optimize.minimize_scalar(lambda l: f(xk + l * sk),
                                        method='golden').x
        xk1 = xk + lmbd * sk
        gk1 = gradF(xk1)

        # Step 3: stop when the gradient or the step is small enough.
        if np.linalg.norm(gk1) < eps or np.linalg.norm(xk1 - xk) < eps:
            return xk1

        if j < n:
            # Fletcher-Reeves update: omega = ||g_{k+1}||^2 / ||g_k||^2.
            omega = (np.linalg.norm(gk1) ** 2) / (np.linalg.norm(gradF(xk)) ** 2)
            sk = -gk1 + omega * sk
            j += 1
        else:
            # Periodic restart with the steepest-descent direction.
            sk = -gk1
            j = 0
        xk = xk1

    return xk
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement