clear all
close all

% load the Fashion-MNIST training set (importfile is the author's generated CSV import helper)
Data = importfile('fashion-mnist_train.csv', 2, inf);
input  = Data(:,2:end);   % pixel values (note: "input" shadows MATLAB's built-in input function)
target = Data(:,1);       % class labels 0-9

% load the Fashion-MNIST test set
Data = importfile('fashion-mnist_test.csv', 2, inf);
input_test  = Data(:,2:end);
target_test = Data(:,1);

clearvars Data;

target_string = {0 'T-shirt/top';
                 1 'Trouser';
                 2 'Pullover';
                 3 'Dress';
                 4 'Coat';
                 5 'Sandal';
                 6 'Shirt';
                 7 'Sneaker';
                 8 'Bag';
                 9 'Ankle boot'};

% show 20 random training images with their class labels
figure;
colormap(gray);

perm = randperm(60000, 20);
for i = 1:20
    subplot(4,5,i);
    display_array = reshape(input(perm(i),:), [28 28]);
    imshow(display_array', []);
    xlabel(target_string{target(perm(i))+1, 2});
end

% From this point on, the data is already imported into the workspace.
% However, keep in mind that:
%     - the inputs and outputs of a neural network in MATLAB are arranged along
%       the rows of the matrix (one feature per row, one sample per column);
%     - for a classification problem it is desirable to encode the categorical
%       outputs with a "one-hot" scheme, where each output-layer neuron encodes
%       one category.

% one-hot encoding of the labels (network outputs), one column per sample
train_output = zeros(10, 60000);
test_output  = zeros(10, 10000);
confusion_matrix = zeros(10, 60000);       % predicted classes (training), filled after simulation
confusion_test_matrix = zeros(10, 10000);  % predicted classes (test), filled after simulation

% training labels: label k (0-9) maps to row k+1
for i = 1:60000
    train_output(target(i,1)+1, i) = 1;
end

% one-hot encoding of the test labels
for i = 1:10000
    test_output(target_test(i,1)+1, i) = 1;
end
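
% Note: if the Deep Learning Toolbox is available, the same one-hot matrices can
% be built without loops using ind2vec, which turns 1-based class indices into a
% sparse indicator matrix (sketch of an equivalent alternative; assumes the
% labels are exactly 0-9):
%   train_output = full(ind2vec(target' + 1));
%   test_output  = full(ind2vec(target_test' + 1));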

% data normalization: linearly rescale the pixel values to the range [-1, 1]
a_train = min(input(:));
b_train = max(input(:));
ra_train = 1;
rb_train = -1;

a_test = min(input_test(:));
b_test = max(input_test(:));
ra_test = 1;
rb_test = -1;

data_train = transpose((((ra_train-rb_train)*(input-a_train))/(b_train - a_train)) + rb_train);
data_test  = transpose((((ra_test-rb_test)*(input_test-a_test))/(b_test - a_test)) + rb_test);
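
% An alternative (sketch): mapminmax from the Deep Learning Toolbox also maps the
% data to [-1, 1], but it rescales each row (feature) independently rather than
% using the global min/max as above, and it allows the training statistics to be
% reused for the test set:
%   [data_train, ps] = mapminmax(transpose(input));
%   data_test = mapminmax('apply', transpose(input_test), ps);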

% create the feed-forward network (newff): two hidden layers of 11 and 7 neurons,
% tansig transfer functions, trained with gradient descent with momentum and
% adaptive learning rate (traingdx)
net = newff(data_train, train_output, [11 7], {'tansig', 'tansig'}, 'traingdx');

net.trainParam.epochs = 500;
net.trainParam.goal = 0.01;
net.trainParam.lr = 0.01;

net.divideParam.trainRatio = 0.8;
net.divideParam.valRatio = 0.2;
net.divideParam.testRatio = 0;

% train the network
net = train(net, data_train, train_output);
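
% Note: newff is a legacy interface. In recent toolbox versions an equivalent
% classification network can be set up with patternnet (sketch only; defaults
% differ, e.g. patternnet uses a softmax output layer rather than tansig):
%   net = patternnet([11 7], 'traingdx');
%   net.trainParam.epochs = 500;
%   net.trainParam.goal = 0.01;
%   net.trainParam.lr = 0.01;
%   net.divideParam.trainRatio = 0.8;
%   net.divideParam.valRatio = 0.2;
%   net.divideParam.testRatio = 0;
%   net = train(net, data_train, train_output);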

% simulate the network on the training and test sets
Y = sim(net, data_train);
Y_t = sim(net, data_test);

% confusion matrices: mark the winning (maximum-output) class of each sample
for i = 1:60000
    x = max(Y(:, i));
    for j = 1:10
        if Y(j, i) == x
            confusion_matrix(j, i) = 1;
        end
    end
end

figure;
plotconfusion(train_output, confusion_matrix);

for i = 1:10000
    x = max(Y_t(:, i));
    for j = 1:10
        if Y_t(j, i) == x
            confusion_test_matrix(j, i) = 1;
        end
    end
end

figure;
plotconfusion(test_output, confusion_test_matrix);
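
% optional: overall classification accuracy as a quick sanity check
% (max over rows gives the predicted class per column; vec2ind converts the
% one-hot target matrices back to class indices)
[~, pred_train] = max(Y, [], 1);
[~, pred_test]  = max(Y_t, [], 1);
fprintf('training accuracy: %.2f%%\n', 100*mean(pred_train == vec2ind(train_output)));
fprintf('test accuracy:     %.2f%%\n', 100*mean(pred_test  == vec2ind(test_output)));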