Advertisement
Guest User

Untitled

a guest
Jul 25th, 2016
57
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
R 2.44 KB | None | 0 0
  1. rm(list=ls())
  2.  
  3. # Load MXNet
  4. require(mxnet)
  5.  
  6. # Train test datasets
  7. train <- read.csv("train_28.csv")
  8. test <- read.csv("test_28.csv")
  9.  
  10. # Fix train and test datasets
  11. train <- data.matrix(train)
  12. train_x <- t(train[,-1])
  13. train_y <- train[,1]
  14. train_array <- train_x
  15. dim(train_array) <- c(28, 28, 1, ncol(train_x))
  16.  
  17. test__ <- test
  18. test_x <- t(test[,-1])
  19. test_y <- test[,1]
  20. test_array <- test.x
  21. dim(test_array) <- c(28, 28, 1, ncol(test_x))
  22.  
  23. # Model
  24. data <- mx.symbol.Variable('data')
  25. # 1st convolutional layer 5x5 kernel and 20 filters.
  26. conv_1 <- mx.symbol.Convolution(data= data, kernel = c(5,5), num_filter = 20)
  27. tanh_1 <- mx.symbol.Activation(data= conv_1, act_type = "tanh")
  28. pool_1 <- mx.symbol.Pooling(data = tanh_1, pool_type = "max", kernel = c(2,2), stride = c(2,2))
  29. # 2nd convolutional layer 5x5 kernel and 50 filters.
  30. conv_2 <- mx.symbol.Convolution(data = pool_1, kernel = c(5,5), num_filter = 50)
  31. tanh_2 <- mx.symbol.Activation(data = conv_2, act_type = "tanh")
  32. pool_2 <- mx.symbol.Pooling(data = tanh_2, pool_type = "max", kernel = c(2,2), stride = c(2,2))
  33. # 1st fully connected layer
  34. flat <- mx.symbol.Flatten(data = pool2)
  35. fcl_1 <- mx.symbol.FullyConnected(data = flat, num_hidden = 500)
  36. tanh_3 <- mx.symbol.Activation(data = fcl_1, act_type = "tanh")
  37. # 2nd fully connected layer
  38. fcl_2 <- mx.symbol.FullyConnected(data = tanh_3, num_hidden = 10)
  39. # Output
  40. NN_model <- mx.symbol.SoftmaxOutput(data = fcl_2)
  41.  
  42. # Set seed for reproducibility
  43. mx.set.seed(100)
  44.  
  45. # Device used. Sadly not the GPU :-(
  46. device <- mx.cpu()
  47.  
  48. # Train on 1200 samples
  49. model <- mx.model.FeedForward.create(NN_model, X = train_array, y = train_y,
  50. ctx = device,
  51. num.round = 30,
  52. array.batch.size = 100,
  53. learning.rate = 0.05,
  54. momentum = 0.9,
  55. wd = 0.00001,
  56. eval.metric = mx.metric.accuracy,
  57. epoch.end.callback = mx.callback.log.train.metric(100))
  58.  
  59. # Test on 312 samples
  60. predict_probs <- predict(model, test.array)
  61. predicted_labels <- max.col(t(predict_probs)) - 1
  62. table(test__[,1], predicted_labels)
  63. sum(diag(table(test__[,1], predicted_labels)))/312
  64.  
  65. ##############################################
  66. # Output
  67. ##############################################
  68. # pred.label
  69. # 0 1
  70. # 0 83 47
  71. # 1 34 149
  72. #
  73. #
  74. # [1] 0.7412141
  75. #
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement