Advertisement
Guest User

org-mode-lab-example

a guest
Dec 14th, 2018
70
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 4.98 KB | None | 0 0
  1. #+TITLE: ОТЧЕТ ПО ПРАКТИЧЕСКОЙ РАБОТЕ № 7
  2. #+AUTHOR: Чащинский Н.И.
  3. #+MACRO: GROUP ИВБО-03-16
  4. #+MACRO: DEPARTMENT Вычислительной Техники
  5. #+MACRO: SUBJECT Теория принятия решений
  6. #+MACRO: THEMETITLE Тема практической работы:
  7. #+MACRO: THEME Методы безусловной оптимизации 1-го порядка: Метод наискорейшего спуска
  8. #+MACRO: TIMETITLE Практическая работа выполнена \today
  9. #+MACRO: TEACHER доцент Сорокин А.Б.
  10. #+INCLUDE: ~/BORG/common/header.org
  11.  
  12. * Задание
  13. Вариант №9
  14. \begin{equation}
  15. f(\vec{x}) = 1.8x_1^2 + 1.8x_1x_2 + 2x_2^2 - 1.1x_1 + 2.3x_2 \rightarrow \text{min}
  16. \end{equation}
  17. \begin{align*}
  18. \vec{x}^{(0)} &= (2, 3)\\
  19. \vec{x}^* &= (0.7652, -0.9194)\\
  20. f^* &= -1.478\\
  21. \epsilon &= 0.0001\\
  22. \end{align*}
  23. * Формулы
  24. \begin{equation*}
  25. f_k = f(\vec{x}^{(k)})
  26. \end{equation*}
  27. \begin{equation}
  28. \vec{x}^{(k+1)} = \vec{x}^{(k)} - h_k \nabla f_k
  29. \end{equation}
  30. \begin{equation}
  31. h_k = \frac{(\nabla f_k, \nabla f_k)}{(H(\vec{x}^{(k)})\,\nabla f_k, \nabla f_k)}
  32. \end{equation}
  33. * Решение
  34. #+call: shortest_descent()
  35.  
  36. #+RESULTS:
  37. | k | x1 | x2 | f | grad norm |
  38. |----+---------+---------+---------+-----------|
  39. | 0 | 2.0 | 3.0 | 40.7 | 21.2758 |
  40. | 1 | -1.2135 | -2.0018 | 11.7682 | 12.0239 |
  41. | 2 | 1.2592 | 0.1492 | 2.1946 | 6.353 |
  42. | 3 | 0.2414 | -1.2705 | -0.4064 | 3.4426 |
  43. | 4 | 0.924 | -0.6341 | -1.1884 | 1.7927 |
  44. | 5 | 0.6276 | -1.0238 | -1.3964 | 0.9539 |
  45. | 6 | 0.8125 | -0.8438 | -1.4563 | 0.4937 |
  46. | 7 | 0.7292 | -0.9492 | -1.4721 | 0.2601 |
  47. | 8 | 0.7788 | -0.8994 | -1.4765 | 0.1343 |
  48. | 9 | 0.7558 | -0.9277 | -1.4777 | 0.0703 |
  49. | 10 | 0.769 | -0.9141 | -1.478 | 0.0362 |
  50. | 11 | 0.7628 | -0.9217 | -1.4781 | 0.0189 |
  51. | 12 | 0.7663 | -0.918 | -1.4781 | 0.0097 |
  52. | 13 | 0.7646 | -0.92 | -1.4781 | 0.0051 |
  53. | 14 | 0.7655 | -0.919 | -1.4781 | 0.0026 |
  54. | 15 | 0.7651 | -0.9195 | -1.4781 | 0.0014 |
  55. | 16 | 0.7653 | -0.9193 | -1.4781 | 0.0007 |
  56. | 17 | 0.7652 | -0.9194 | -1.4781 | 0.0004 |
  57. | 18 | 0.7653 | -0.9193 | -1.4781 | 0.0002 |
  58. | 19 | 0.7652 | -0.9194 | -1.4781 | 0.0001 |
  59.  
  60. * Код
  61. #+NAME: shortest_descent
  62. #+BEGIN_SRC python
  63. import numpy as np
  64.  
  65. coefs = [1.8, 1.8, 2, -1.1, 2.3]
  66.  
  67. def f(x):
  68. c = coefs
  69. c0 = c[0]*x[0]**2
  70. c1 = c[1]*x[0]*x[1]
  71. c2 = c[2]*x[1]**2
  72. c3 = c[3]*x[0]
  73. c4 = c[4]*x[1]
  74. return c0 + c1 + c2 + c3 + c4
  75.  
  76. def grad(x):
  77. c = coefs
  78. x0 = 2*c[0]*x[0] + c[1]*x[1] + 0*c[2] + c[3] + 0*c[4]
  79. x1 = 0*c[0] + c[1]*x[0] + 2*c[2]*x[1] + 0*c[3] + c[4]
  80. return np.array([x0, x1])
  81.  
  82. def hesse():
  83. c = coefs
  84. row0 = [c[0], c[1]]
  85. row1 = [c[1], c[2]]
  86. return np.array([row0, row1])
  87.  
  88. def h_k(x):
  89. g = grad(x)
  90. H = hesse()
  91. return (np.dot(g, g) / np.dot(H.dot(g), g))
  92.  
  93. def step(x):
  94. h = h_k(x)
  95. return x - h*grad(x)
  96.  
  97. def optimize(x, epsilon):
  98. report = []
  99. g = grad(x)
  100. grad_norm = np.sqrt(g.dot(g))
  101. k = 0
  102. row = [k, x[0], x[1], f(x), grad_norm]
  103. row = [round(c, 4) for c in row]
  104. report.append(row)
  105. while grad_norm > epsilon:
  106. h = h_k(x)
  107. x -= h * g
  108. g = grad(x)
  109. grad_norm = np.sqrt(g.dot(g))
  110. k += 1
  111. row = [k, x[0], x[1], f(x), grad_norm]
  112. row = [round(c, 4) for c in row]
  113. report.append(row)
  114. header = ['k', 'x1', 'x2', 'f', 'grad norm']
  115. report.insert(0, header)
  116. report.insert(1, None)
  117. return report
  118.  
  119. x = np.array([2.0, 3.0])
  120. epsilon = 0.0001
  121. report = optimize(x, epsilon)
  122. return report
  123. #+END_SRC
  124.  
  125. #+RESULTS: shortest_descent
  126. | k | x1 | x2 | f | grad norm |
  127. |----+---------+---------+---------+-----------|
  128. | 0 | 2.0 | 3.0 | 40.7 | 21.2758 |
  129. | 1 | -1.2135 | -2.0018 | 11.7682 | 12.0239 |
  130. | 2 | 1.2592 | 0.1492 | 2.1946 | 6.353 |
  131. | 3 | 0.2414 | -1.2705 | -0.4064 | 3.4426 |
  132. | 4 | 0.924 | -0.6341 | -1.1884 | 1.7927 |
  133. | 5 | 0.6276 | -1.0238 | -1.3964 | 0.9539 |
  134. | 6 | 0.8125 | -0.8438 | -1.4563 | 0.4937 |
  135. | 7 | 0.7292 | -0.9492 | -1.4721 | 0.2601 |
  136. | 8 | 0.7788 | -0.8994 | -1.4765 | 0.1343 |
  137. | 9 | 0.7558 | -0.9277 | -1.4777 | 0.0703 |
  138. | 10 | 0.769 | -0.9141 | -1.478 | 0.0362 |
  139. | 11 | 0.7628 | -0.9217 | -1.4781 | 0.0189 |
  140. | 12 | 0.7663 | -0.918 | -1.4781 | 0.0097 |
  141. | 13 | 0.7646 | -0.92 | -1.4781 | 0.0051 |
  142. | 14 | 0.7655 | -0.919 | -1.4781 | 0.0026 |
  143. | 15 | 0.7651 | -0.9195 | -1.4781 | 0.0014 |
  144. | 16 | 0.7653 | -0.9193 | -1.4781 | 0.0007 |
  145. | 17 | 0.7652 | -0.9194 | -1.4781 | 0.0004 |
  146. | 18 | 0.7653 | -0.9193 | -1.4781 | 0.0002 |
  147. | 19 | 0.7652 | -0.9194 | -1.4781 | 0.0001 |
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement