#!/usr/bin/env python

# Just playing with Theano. This doesn't do anything particularly useful
# other than show how things work.

import numpy
import theano
import theano.tensor as T
from theano import function

# Create the variables and the operation on them
x = T.dscalar('x')
y = T.dmatrix('y')
z = x + y

# List of input parameters followed by the output graph
f = function([x, y], z)  # Compiles an executable function

print(f(2, [[1, 2], [3, 4]]))
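
# Inspect the compiled expression's symbolic graph. (Illustrative
# addition, not in the original paste; pp is Theano's pretty-printer.)
from theano import pp
print(pp(z))  # '(x + y)'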

# The logistic (sigmoid) function
x = T.dmatrix('x')
s = 1 / (1 + T.exp(-x))  # 1 / (1 + e^-x)
logistic = function([x], s)

m_1 = [[1, 1], [2, 2]]
m_2 = [[1, 1], [2, 3]]

print(logistic(m_1))
print(logistic(m_2))

# Create multiple matrices simultaneously
a, b = T.dmatrices('a', 'b')

# A bunch of expressions built from them
diff = a - b
abs_diff = abs(diff)
diff_sqr = diff * diff

f = function([a, b], [diff, abs_diff, diff_sqr])  # Computes all three in one shot

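# Quick check of the multi-output function (illustrative addition, not in
# the original paste): one call returns all three results.
d, ad, ds = f([[1, 1], [1, 1]], [[0, 1], [2, 3]])
print(d)   # [[ 1.  0.] [-1. -2.]]
print(ad)  # [[ 1.  0.] [ 1.  2.]]
print(ds)  # [[ 1.  0.] [ 1.  4.]]
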
# Shared variables
# ------------------------
from theano import shared

state = shared(0)  # Shared variable
inc = T.iscalar('inc')  # Incrementer variable

# One (variable, update expression) tuple in updates for every shared variable
f = function([inc], state, updates=[(state, state + inc)])

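# Illustrative usage (not in the original paste): each call returns the
# state *before* the update; get_value()/set_value() read and reset it.
print(f(1))               # 0 -- then state becomes 1
print(f(10))              # 1 -- then state becomes 11
print(state.get_value())  # 11
state.set_value(0)        # Reset the counter
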
# Full logistic regression
# ---------------------------

rng = numpy.random
N = 400  # Number of rows (training examples)
features = 700  # Number of features per example

# The dataset: a random design matrix and random binary labels
D = (rng.randn(N, features), rng.randint(size=N, low=0, high=2))

training_steps = 10000

x = T.dmatrix('x')
y = T.dvector('y')

w = shared(rng.randn(features), name='weights')
# Don't forget the decimal point; b would be an integer otherwise and mess everything up
b = shared(0., name='b')

# Predictions
p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b))  # Probability that the target is 1
prediction = p_1 > 0.5

# Cross-entropy loss
xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)

# Cost to minimize: mean loss plus an L2 penalty on the weights
cost = xent.mean() + 0.01 * (w ** 2).sum()

# Gradients of the cost with respect to the weights and bias
gw, gb = T.grad(cost, [w, b])

# Put it all together
train = function(inputs=[x, y], outputs=[prediction, xent],
                 updates=[(w, w - 0.1 * gw), (b, b - 0.1 * gb)])
predict = function(inputs=[x], outputs=[prediction])

# Now we need to run it -- train!
# We'll ignore the prediction and error outputs for now
for i in range(training_steps):
    pred, err = train(D[0], D[1])

# Now predict -- call the compiled function, not the symbolic variable
print(predict(D[0]))
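
# Sanity check (illustrative addition, not in the original paste): measure
# how well the trained model reproduces the (random) labels.
accuracy = (predict(D[0])[0] == D[1]).mean()
print("training accuracy:", accuracy)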