Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- # Imports and setup here
- # ...
# Forward-pass stub: the real implementation is excluded from this paste.
# From the call site in train(): returns a 3-tuple (out, loss, acc) where
# `out` is indexable by `label` (presumably the 10-way softmax output
# vector), `loss` is the cross-entropy loss, and `acc` is accumulated as a
# correct-prediction count — presumably 1 if the prediction matched
# `label`, else 0 (inferred from the caller; confirm against the full file).
# NOTE(review): with only comments in the body, this def is a syntax error
# as pasted — the omitted implementation must be restored before running.
def forward(image, label):
    # Implementation excluded
    # ...
def train(im, label, lr=.005):
    """Run one full training step on a single example.

    Parameters:
        im: the input image (a 2d numpy array).
        label: the correct digit for this image.
        lr: the learning rate.

    Returns:
        A (loss, accuracy) tuple: the cross-entropy loss and the
        accuracy for this example.
    """
    # Forward pass: softmax output plus loss/accuracy for this example.
    probs, loss, acc = forward(im, label)

    # Initial gradient of the cross-entropy loss w.r.t. the softmax
    # output: zero everywhere except at the true class.
    grad = np.zeros(10)
    grad[label] = -1 / probs[label]

    # Backward pass — only the softmax layer is wired up so far.
    grad = softmax.backprop(grad, lr)
    # TODO: backprop MaxPool2 layer
    # TODO: backprop Conv3x3 layer

    return loss, acc
print('MNIST CNN initialized!')

# Train!
loss = 0
num_correct = 0
for i, (im, label) in enumerate(zip(train_images, train_labels)):
    # Report rolling stats every 100 steps.
    # Fix: the original guard was `i > 0 and i % 99 == 0`, which fires
    # every 99 steps while still dividing the accumulated loss by 100 and
    # labelling the window "Past 100 steps" — an off-by-one that skews
    # both the reported average loss and the accuracy percentage.
    if i % 100 == 99:
        print(
            '[Step %d] Past 100 steps: Average Loss %.3f | Accuracy: %d%%' %
            (i + 1, loss / 100, num_correct)
        )
        loss = 0
        num_correct = 0

    # One training step on this example; accumulate stats for the report.
    l, acc = train(im, label)
    loss += l
    num_correct += acc
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement