Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Load data into RAM.
# Root of the assignment directory; the .npz dataset files live under Data/data/.
path = "C:/Users/dream/Desktop/4th Year/Introduction to Deep Learning/Assignment 1/"
trainxs = npzread(path * "Data/data/fashion-train-imgs.npz")    # training images
trainys = npzread(path * "Data/data/fashion-train-labels.npz")  # training labels
devxs = npzread(path * "Data/data/fashion-dev-imgs.npz")        # validation images
devys = npzread(path * "Data/data/fashion-dev-labels.npz")      # validation labels
testxs = npzread(path * "Data/data/fashion-test-imgs.npz")      # test images
testys = npzread(path * "Data/data/fashion-test-labels.npz")    # test labels
- # Define logistic function:
# Define logistic function:
#
# Compute e^(w·x + b) for one image as a BigFloat.
#
# `image` and `weights` must have identical dimensions; the result is
# e^(Σᵢⱼ image[i,j] * weights[i,j] + bias). The exponentiation is done in
# BigFloat so large linear combinations do not overflow. (The original code
# computed e^z in Float64 first — overflowing to Inf for z ≳ 710 — and only
# then converted to BigFloat, which defeated the point of using BigFloat.)
#
# Throws DimensionMismatch when the image and weight shapes differ (the
# original silently skipped the dot product and returned e^bias).
function exponentialResponse(image :: Array{Float64, 2}, weights :: Array{Float64, 2}, bias :: Float64)
    size(image) == size(weights) ||
        throw(DimensionMismatch("image $(size(image)) and weights $(size(weights)) must have equal sizes"))
    # Dot product of the two matrices, element by element, plus the bias.
    linearCombination = sum(image[k] * weights[k] for k in eachindex(image, weights)) + bias
    # Exponentiate in extended precision to avoid Float64 overflow to Inf.
    return exp(BigFloat(linearCombination))
end
- # Define loss function:
# Define loss function:
#
# Mean squared error of the logistic model over the training set:
#     L(w, b) = (1 / 2n) * Σᵢ (yᵢ - σᵢ)²
# where σᵢ = e^zᵢ / (1 + e^zᵢ) is the logistic response for image i and
# yᵢ ∈ {0, 1}. Reads the globals `trainxs` (H × W × n) and `trainys` (length n).
#
# Changes from the original: the accumulator no longer shadows `Base.sum` and
# is initialized as BigFloat (type-stable, matching exponentialResponse's
# return type); the per-image debug prints are removed; and
# y² - 2yσ + σ² is written as the equivalent (y - σ)².
function loss(weights :: Array{Float64, 2}, bias :: Float64)
    n = size(trainxs, 3)
    total = zero(BigFloat)
    for i in 1:n
        expResp = exponentialResponse(trainxs[:, :, i], weights, bias)
        prediction = expResp / (1 + expResp)  # logistic sigmoid of the linear response
        total += (trainys[i] - prediction)^2
    end
    return total / (2 * n)
end
- # Compute a specific derivative (for weight j):
# Compute a specific derivative (for weight j):
#
# Partial derivative of the loss with respect to the weight at position
# j = [row, col]. The training set is split by label: images with yᵢ = 0
# contribute x_j * e^z / (1 + e^z)³, images with yᵢ = 1 contribute
# x_j * e^(2z) / (1 + e^z)³, and the result is (Σ over y=0) − (Σ over y=1).
# Reads the globals `trainxs` and `trainys`.
#
# Throws ArgumentError when `j` does not hold exactly two indices (the
# original silently returned 0, hiding caller bugs). Accumulators are
# BigFloat-initialized for type stability, and the exponentialResponse call
# shared by both branches is hoisted out of the if/else.
function derivate(weights :: Array{Float64, 2}, bias :: Float64, j :: Array{Int64, 1})
    length(j) == 2 ||
        throw(ArgumentError("j must hold exactly two indices [row, col], got $(length(j))"))
    sumBag = zero(BigFloat)     # contributions of label-1 ("bag") images
    sumNotBag = zero(BigFloat)  # contributions of label-0 images
    for i in 1:size(trainys, 1)
        expResp = exponentialResponse(trainxs[:, :, i], weights, bias)
        pixel = trainxs[j[1], j[2], i]
        denominator = (1 + expResp)^3
        if trainys[i] == 0
            sumNotBag += pixel * expResp / denominator
        else
            sumBag += pixel * expResp^2 / denominator
        end
    end
    return sumNotBag - sumBag
end
# Gradient of the loss with respect to every weight.
#
# Returns a Matrix{Float64} the same size as `weights` whose (i, j) entry is
# derivate(weights, bias, [i, j]).
function computeAllDerivatives(weights :: Array{Float64, 2}, bias :: Float64)
    weightDerivatives = zeros(size(weights, 1), size(weights, 2))
    # Outer loop over columns, inner over rows: Julia arrays are column-major,
    # so this fills weightDerivatives in memory order (the original iterated
    # row-major).
    for j in axes(weights, 2)
        for i in axes(weights, 1)
            weightDerivatives[i, j] = derivate(weights, bias, [i, j])
        end
    end
    return weightDerivatives
end
# Entry point: evaluate the loss with every weight set to 1.0 and bias 1.0.
# fill(1.0, size(trainxs)[1:2]) builds a weight matrix matching one image's
# height × width (the first two dimensions of the H × W × n training array).
print(loss(fill(1.0, size(trainxs)[1:2]), 1.0))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement