Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # -*- coding: utf-8 -*-
- from __future__ import print_function
- import time
- import numpy as np
- import theano
- import theano.tensor as T
- import lasagne
- import matplotlib.pyplot as plt
def load_dataset(n_train=10000, n_test=1000, side=16, seed=1337):
    """Generate a synthetic regression dataset of random images.

    Each sample is a `side` x `side` float32 image of standard-normal
    noise; the regression target is the mean of the sample's pixels.
    Defaults reproduce the original fixed-size dataset exactly (same
    seed, same order of RNG draws).

    Parameters:
        n_train: number of training samples.
        n_test: number of test samples.
        side: image side length (images are 1 x side x side).
        seed: numpy RNG seed for reproducibility.

    Returns:
        (X_train, Y_train, X_test, Y_test) with X arrays shaped
        (N, 1, side, side) float32 and Y arrays shaped (N,).
    """
    np.random.seed(seed)
    X_train = np.random.randn(n_train, side * side).astype('float32')
    Y_train = np.mean(X_train, 1)
    X_test = np.random.randn(n_test, side * side).astype('float32')
    Y_test = np.mean(X_test, 1)
    # Reshape to NCHW layout expected by the Conv2D input layer.
    X_train = np.reshape(X_train, (n_train, 1, side, side))
    X_test = np.reshape(X_test, (n_test, 1, side, side))
    return X_train, Y_train, X_test, Y_test
def build_cnn(input_var=None):
    """Build the regression CNN for 1x16x16 inputs.

    Architecture: conv (20 filters, 3x3, ReLU, Glorot init) ->
    dense (10 units, sigmoid) -> dense (1 unit, linear output).

    Parameters:
        input_var: optional Theano tensor4 bound to the input layer.

    Returns:
        The output lasagne layer of the network.
    """
    net = lasagne.layers.InputLayer(shape=(None, 1, 16, 16),
                                    input_var=input_var)
    net = lasagne.layers.Conv2DLayer(
        net,
        num_filters=20,
        filter_size=(3, 3),
        nonlinearity=lasagne.nonlinearities.rectify,
        W=lasagne.init.GlorotUniform(),
    )
    net = lasagne.layers.DenseLayer(
        net, num_units=10,
        nonlinearity=lasagne.nonlinearities.sigmoid)
    # Single linear unit: scalar regression output per sample.
    return lasagne.layers.DenseLayer(
        net, num_units=1,
        nonlinearity=lasagne.nonlinearities.linear)
def iterate_minibatches(inputs, targets, batchsize, shuffle=False):
    """Yield successive (inputs, targets) minibatches of size `batchsize`.

    Trailing samples that do not fill a complete batch are dropped,
    matching the standard Lasagne example behaviour.

    Parameters:
        inputs: indexable array of samples.
        targets: indexable array of targets, same length as `inputs`.
        batchsize: number of samples per yielded batch.
        shuffle: if True, visit samples in a random order.

    Raises:
        ValueError: if `inputs` and `targets` have different lengths.
        (Was a bare `assert`, which is stripped under `python -O`.)
    """
    if len(inputs) != len(targets):
        raise ValueError("inputs and targets must have the same length "
                         "(got {} and {})".format(len(inputs), len(targets)))
    if shuffle:
        indices = np.arange(len(inputs))
        np.random.shuffle(indices)
    for start_idx in range(0, len(inputs) - batchsize + 1, batchsize):
        if shuffle:
            excerpt = indices[start_idx:start_idx + batchsize]
        else:
            # Slices avoid copying when the backing array supports views.
            excerpt = slice(start_idx, start_idx + batchsize)
        yield inputs[excerpt], targets[excerpt]
def main(model='cnn', num_epochs=10):
    """Train the small CNN regressor on synthetic data and plot predictions.

    Parameters:
        model: unused placeholder kept for interface compatibility.
        num_epochs: number of full passes over the training set.

    Side effects: prints per-epoch losses and shows a matplotlib
    scatter of true vs. predicted test targets.
    """
    print("Loading data...")
    X_train, y_train, X_test, y_test = load_dataset()

    # Symbolic variables: 4D image batches and 1D regression targets.
    input_var = T.tensor4('inputs')
    target_var = T.vector('targets')

    print("Building model and compiling functions...")
    network = build_cnn(input_var)

    # Training objective: mean squared error of the network output.
    prediction = lasagne.layers.get_output(network)
    loss = lasagne.objectives.squared_error(prediction, target_var).mean()

    params = lasagne.layers.get_all_params(network, trainable=True)
    updates = lasagne.updates.nesterov_momentum(
        loss, params, learning_rate=0.1, momentum=0.9)
    # updates = lasagne.updates.adam(loss, params)

    # Evaluation expression: deterministic=True disables stochastic layers
    # (e.g. dropout). This architecture has none, so the expression is
    # identical to `prediction`, but eval should be deterministic by
    # convention.
    test_prediction = lasagne.layers.get_output(network, deterministic=True)
    test_loss = lasagne.objectives.squared_error(test_prediction,
                                                 target_var).mean()

    train_fn = theano.function([input_var, target_var], loss, updates=updates)
    val_fn = theano.function([input_var, target_var], test_loss)
    preds = theano.function([input_var], test_prediction)

    print("Starting training...")
    for epoch in range(num_epochs):
        train_err = 0.0
        train_batches = 0
        start_time = time.time()
        # Shuffle training batches each epoch so SGD does not see the
        # same minibatch sequence every pass (was shuffle=False).
        for inputs, targets in iterate_minibatches(X_train, y_train, 500,
                                                   shuffle=True):
            train_err += train_fn(inputs, targets)
            train_batches += 1

        test_err = 0.0
        test_batches = 0
        for inputs, targets in iterate_minibatches(X_test, y_test, 500,
                                                   shuffle=False):
            test_err += val_fn(inputs, targets)
            test_batches += 1

        print("Epoch {} of {} took {:.3f}s".format(
            epoch + 1, num_epochs, time.time() - start_time))
        print(" training loss:\t\t{:.6f}".format(train_err / train_batches))
        print(" test loss:\t\t{:.6f}".format(test_err / test_batches))

    # Visual sanity check: perfect predictions fall on the y = x diagonal.
    pds = preds(X_test)
    plt.scatter(y_test, pds)
    plt.show()


if __name__ == '__main__':
    main()
Add Comment
Please, Sign In to add comment