subroutine mlrmodel(fvec,n,d,m,c,cgrad)
    !Compute the regularized multinomial logistic regression cost c and its
    !gradient cgrad for the fitting parameters packed in fvec. The training
    !data lr_x, lr_y, the regularization parameter lr_lambda, and the helpers
    !compute_mlr_a and safe_log are assumed to be available by host or use
    !association (they are not defined in this paste).
    implicit none
    integer, intent(in) :: n,d,m !number of features, training points, and classes
    real(kind=8), dimension((m-1)*(n+1)), intent(in) :: fvec !fitting parameters
    real(kind=8), intent(out) :: c !cost
    real(kind=8), dimension((m-1)*(n+1)), intent(out) :: cgrad !gradient of cost
    integer :: i1,j1,i,j,k,y,bias_column
    real(kind=8), dimension(m-1,n) :: w !weight matrix
    real(kind=8), dimension(m-1) :: b !bias vector
    real(kind=8), dimension(n) :: x !one training input
    real(kind=8), dimension(m-1) :: a !activations for the first m-1 classes

    !offset of the bias entries within fvec and cgrad
    bias_column = (m-1)*n

    !unpack fitting parameters: column i1 of w occupies fvec((i1-1)*(m-1)+1 : i1*(m-1))
    do i1 = 1, n
        j1 = (i1-1)*(m-1)+1
        w(:,i1) = fvec(j1:j1+m-2) !weight matrix
    end do
    b = fvec(bias_column+1:bias_column+m-1) !bias vector

    !initialize accumulators (intent(out) variables are undefined on entry)
    c = 0.d0
    cgrad = 0.d0

    !add cost and gradient contributions from each training point
    do k = 1, d
        !fetch training input and label (labels assumed to run from 0 to m-1)
        x = lr_x(:,k)
        y = lr_y(k)

        a = compute_mlr_a(w,b,x,n,m) !activations a(i) for classes 1..m-1

        do i = 1, m-1
            if (y+1 == i) then !y+1 converts the 0-based label to the 1-based class index
                !cost contribution of the correct class
                c = c - safe_log(a(i))

                !bias gradient when class i matches the label
                cgrad(bias_column+i) = cgrad(bias_column+i) - (1.d0-a(i))

                !weight gradient when class i matches the label
                do j = 1, n
                    cgrad((j-1)*(m-1)+i) = cgrad((j-1)*(m-1)+i) - (1.d0-a(i))*x(j)
                end do
            else
                !bias gradient when class i does not match the label
                cgrad(bias_column+i) = cgrad(bias_column+i) + a(i)

                !weight gradient when class i does not match the label
                do j = 1, n
                    cgrad((j-1)*(m-1)+i) = cgrad((j-1)*(m-1)+i) + a(i)*x(j)
                end do
            end if
        end do
    end do

    !add L2 penalty terms: cost lr_lambda*sum(w**2), gradient 2*lr_lambda*w
    do i = 1, n
        do j = 1, m-1
            cgrad((i-1)*(m-1)+j) = cgrad((i-1)*(m-1)+j) + 2.d0*lr_lambda*w(j,i)
            c = c + lr_lambda*w(j,i)*w(j,i)
        end do
    end do
end subroutine mlrmodel
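
The subroutine relies on several names that are never defined in this paste: the training data lr_x and lr_y, the regularization parameter lr_lambda, and the helpers compute_mlr_a and safe_log. The sketch below is a hypothetical version of that missing context. It assumes compute_mlr_a returns softmax activations for the first m-1 classes, with class m-1 as the implicit reference class (which is what the y+1 == i test suggests), and that safe_log only guards log against a zero argument. The module name and every definition in it are assumptions, not code from the original.

module lrmodel_data !hypothetical module name, not part of the original paste
    implicit none
    real(kind=8), allocatable, dimension(:,:) :: lr_x !training inputs, n by d (filled elsewhere)
    integer, allocatable, dimension(:) :: lr_y        !training labels in 0..m-1 (filled elsewhere)
    real(kind=8) :: lr_lambda = 1.d0                  !regularization parameter

contains

    function compute_mlr_a(w,b,x,n,m) result(a)
        !softmax activations for the first m-1 classes, class m-1 as reference:
        !a(i) = exp(w(i,:).x + b(i)) / (1 + sum_j exp(w(j,:).x + b(j)))
        integer, intent(in) :: n,m
        real(kind=8), dimension(m-1,n), intent(in) :: w
        real(kind=8), dimension(m-1), intent(in) :: b
        real(kind=8), dimension(n), intent(in) :: x
        real(kind=8), dimension(m-1) :: a
        a = exp(matmul(w,x) + b)
        a = a/(1.d0 + sum(a))
    end function compute_mlr_a

    function safe_log(p) result(l)
        !logarithm guarded against a zero (or tiny) argument
        real(kind=8), intent(in) :: p
        real(kind=8) :: l
        l = log(max(p, 1.d-15))
    end function safe_log

end module lrmodel_data

With a module like this, mlrmodel would either sit in its contains section or begin with a use lrmodel_data statement. The packing convention it assumes is that fvec holds the (m-1)-by-n weight matrix column by column, followed by the m-1 biases, for a total length of (m-1)*(n+1).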