Advertisement
Misipuk

MO_Lab4

Apr 29th, 2020
488
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 1.37 KB | None | 0 0
  1. import numpy as np
  2. import math
  3. import pandas as pd
  4. from scipy import optimize
  5.  
# eps: convergence tolerance for the stopping criteria in gradSopr.
eps=0.000001
# h: step size for the central-difference numeric derivatives below.
h=0.0001
  8.  
  9. def f(ar):
  10.     x = ar[0]
  11.     y = ar[1]
  12.     z = ar[2]
  13.     return x**2+8*y**2+0.001*x*y-x-y
  14.  
  15. def hiper():
  16.     return (np.array([2,1,3]),1)
  17.  
  18. def dFx(ar):
  19.     x = ar[0]
  20.     y = ar[1]
  21.     z = ar[2]
  22.     return (f(x+h,y,z)-f(x-h,y,z))/(2*h)
  23.  
  24. def dFy(ar):
  25.     x = ar[0]
  26.     y = ar[1]
  27.     z = ar[2]
  28.     return (f(x,y+h,z)-f(x,y-h,z))/(2*h)
  29.  
  30. def dFz(ar):
  31.     x = ar[0]
  32.     y = ar[1]
  33.     z = ar[2]
  34.     return (f(x,y,z+h)-f(x,y,z-h))/(2*h)
  35.  
  36. def gradF(ar):
  37.     return np.array([dFx(ar), dFy(ar),dFz(ar)], float)
  38.  
  39.    
  40. def gradSopr(x,y,z):
  41.     #1 step
  42.     xkj = np.array([x,y,z], float)
  43.     k=0
  44.     #2 step
  45.     j=0
  46.     skj = -gradF(xkj)
  47.     #3 step
  48.     while True:# Минимизацию нужно заменить на Golden Selection
  49.         lmbd = optimize.minimize(lambda l: f(xkj+l*skj), [0,1,0], method='nelder-mead').x   #[0,1,0] - начальная точка
  50.         xkj1 = xkj + lmbd*skj
  51.         omega = (np.linalg.norm(gradF(xkj1))**2)/(np.linalg.norm(gradF(xkj))**2)
  52.         skj1 = gradF(xkj1) + omega*skj
  53.         if np.linalg.norm(skj1)<eps or np.linalg.norm(xkj1 - xkj)<eps:
  54.             break
  55.         elif j<n:
  56.             j++
  57.         else:
  58.             xkj = xkj1
  59.             j=0
  60.             skj = -gradF(xkj)
  61.             k++
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement