Advertisement
Guest User

Untitled

a guest
Jul 25th, 2016
63
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 4.17 KB | None | 0 0
  1. import lasagne.layers as layers
  2. import numpy as np
  3. import theano.tensor as T
  4. from lasagne import nonlinearities
  5. from lasagne.objectives import aggregate, categorical_crossentropy, squared_error
  6. from lasagne.regularization import l1, regularize_layer_params
  7. from nolearn.lasagne import NeuralNet
  8.  
  9. ########################################################
  10. ## Load the "Dataset"
  11. ########################################################
  12. N = 100
  13. data = np.zeros((N, 3, 128, 128), np.float32) # Fake Image Dataset.
  14. cls = np.random.randint(0, 10, N).astype(np.float32) # Classes for images
  15. reg = np.random.random(N).astype(np.float32) # Some regression value
  16. targets = np.stack([cls, reg], axis=1)
  17.  
  18. ########################################################
  19. ## Define The Network
  20. ########################################################
  21. prop = dict(nonlinearity=nonlinearities.leaky_rectify)
  22. l_in = layers.InputLayer((None, 3, 128, 128), name="input")
  23.  
  24. # Shared Convolutions
  25. l_conv1 = layers.Conv2DLayer(l_in, num_filters=32, filter_size=5, name="shared_conv1", **prop)
  26. l_pool1 = layers.Pool2DLayer(l_conv1, pool_size=2, name="shared_pool1")
  27. l_conv2 = layers.Conv2DLayer(l_pool1, num_filters=64, filter_size=3, name="shared_conv2", **prop)
  28. l_pool2 = layers.Pool2DLayer(l_conv2, pool_size=2, name="shared_pool2")
  29. l_conv3 = layers.Conv2DLayer(l_pool2, num_filters=128, filter_size=3, name="shared_conv3", **prop)
  30. l_pool3 = layers.Pool2DLayer(l_conv3, pool_size=2, name="shared_pool3")
  31.  
  32. # Task 1 - 10 Category Classifier
  33. l_tsk1 = layers.DenseLayer(l_pool3, name="task1_d1", num_units=45, **prop)
  34. l_tsk1 = layers.DenseLayer(l_tsk1, name="task1_d2", num_units=64, **prop)
  35. l_tsk1 = layers.DenseLayer(l_tsk1, name="task1_out", num_units=10, nonlinearity=nonlinearities.softmax)
  36.  
  37. # Task 2 - 1 Regression
  38. l_tsk2 = layers.DenseLayer(l_pool3, name="task2_d1", num_units=54, **prop)
  39. l_tsk2 = layers.DenseLayer(l_tsk2, name="task2_d2", num_units=64, **prop)
  40. l_tsk2 = layers.DenseLayer(l_tsk2, name="task2_out", num_units=1, nonlinearity=nonlinearities.sigmoid)
  41.  
  42. out_layers = [l_tsk1, l_tsk2]
  43.  
  44.  
  45. ########################################################
  46. ## Write a custom Objective
  47. ########################################################
  48. def cls_regression(layers_, target, **kwargs):
  49. output_kw = kwargs.pop('get_output_kw', {})
  50. det = kwargs.pop('deterministic', False)
  51.  
  52. l1_penalty = kwargs.pop("l1", 0)
  53. cls_lmb = kwargs.pop("cls_lambda", 1)
  54. reg_lmb = kwargs.pop("reg_lambda", 1)
  55.  
  56. cls_layer = layers_['task1_out']
  57. reg_layer = layers_['task2_out']
  58.  
  59. # Get the outputs
  60. out_cls, out_reg = layers.get_output([cls_layer, reg_layer], deterministic=det, **output_kw)
  61.  
  62. # Get the targets
  63. gt_cls = T.cast(target[:, 0], 'int32')
  64. gt_reg = target[:, 1].reshape((-1, 1))
  65.  
  66. # Calculate the multi task loss
  67. cls_loss = cls_lmb * aggregate(categorical_crossentropy(out_cls, gt_cls))
  68. reg_loss = reg_lmb * aggregate(squared_error(out_reg, gt_reg))
  69. loss = cls_loss + reg_loss
  70.  
  71. if l1_penalty:
  72. loss += l1_penalty * regularize_layer_params(layers_.values(), l1)
  73.  
  74. return loss
  75.  
  76.  
  77. ########################################################
  78. ## Instantiate the Network
  79. ########################################################
  80. network = NeuralNet(layers=out_layers,
  81.  
  82. regression=True, # <=== Its probably easier to leave as regression, and do any argmax manually
  83. custom_scores=[], # <=== Custom scores will only be passed the output of out_layers[-1]
  84. y_tensor_type=None, # <===Output type of out_layers[-1]
  85. max_epochs=200,
  86. objective=cls_regression,
  87. update_learning_rate=0.1,
  88. update_momentum=0.9,
  89. verbose=True,
  90. )
  91.  
  92. # fit will correctly treat as a multi output network.
  93. # Everything else will treat as a single output network with output = out_layers[-1]
  94. network.fit(data, targets,epochs=1)
  95.  
  96. # This will only score out_layers[-1], targets must only include values for out_layers[-1]
  97. sc = network.score(data[10:15], targets[10:15,1])
  98. print(sc)
  99.  
  100. # This output here will only include outputs from out_layers[-1]
  101. p = network.predict(data[10:15])
  102. print(p.shape)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement