Advertisement
Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
# Gradient-descent training loop for a two-layer MLP classifier
# (ReLU hidden layer, softmax output, cross-entropy loss).
# Reads/updates: X (N x D data), Y_ (N true class indices), N, C,
# W1 (h x D), b1 (h,), W2 (C x h), b2 (C,), param_niter, param_delta.
for i in range(param_niter):
    # --- forward pass ---
    s1 = np.dot(X, W1.transpose()) + b1          # N x h, hidden pre-activations
    # ReLU via the native ufunc instead of np.vectorize(relu) — identical
    # result (the backward mask below confirms relu(x) = max(0, x)), C speed.
    h1 = np.maximum(s1, 0.0)                     # N x h
    s2 = np.dot(h1, W2.transpose()) + b2         # N x C, class scores

    # Numerically stable softmax: subtracting the per-row max leaves the
    # probabilities mathematically unchanged but prevents exp() overflow.
    expscores = np.exp(s2 - s2.max(axis=1, keepdims=True))   # N x C
    sumexp = np.sum(expscores, axis=1, keepdims=True)        # N x 1
    probs = expscores / sumexp                               # N x C
    logprobs = np.log(probs)                                 # N x C

    # Cross-entropy loss: summed negative log-probability of the true class.
    loss = -np.sum(logprobs[np.arange(N), Y_])   # scalar

    # diagnostic print every 10 iterations
    if i % 10 == 0:
        print("iteration {}: loss {}".format(i, loss))

    # --- backward pass ---
    # dL/ds2 = probs - one_hot(Y_); build the one-hot matrix vectorized.
    Ycode = np.zeros((N, C))                     # N x C
    Ycode[np.arange(N), Y_] = 1.0
    Gs2 = probs - Ycode                          # N x C

    grad_W2 = 1. / N * np.dot(Gs2.transpose(), h1)   # C x h
    grad_b2 = 1. / N * np.sum(Gs2, axis=0)           # C

    Gh1 = np.dot(Gs2, W2)                        # N x h
    Gh1[h1 == 0] = 0                             # ReLU gradient: zero where inactive
    Gs1 = Gh1

    grad_W1 = 1. / N * np.dot(Gs1.transpose(), X)    # h x D
    grad_b1 = 1. / N * np.sum(Gs1, axis=0)           # h

    # --- parameter update (plain gradient descent) ---
    W1 -= param_delta * grad_W1
    b1 -= param_delta * grad_b1
    W2 -= param_delta * grad_W2
    b2 -= param_delta * grad_b2
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement