Advertisement
albert828

Keras_OOM

Oct 29th, 2020
1,808
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 9.09 KB | None | 0 0
  1. # Folder ze zdjęciami ma ~= 90MB
  2.  
  3. # Generators
  4. from tensorflow.keras.preprocessing.image import ImageDataGenerator
  5.  
  6. img_size = (150, 150)
  7. img_datagen = ImageDataGenerator(rescale=1/255, rotation_range=40, width_shift_range=0.2,
  8.                                  height_shift_range=0.2, shear_range=0.2, zoom_range=0.2,
  9.                                  horizontal_flip=True)
  10. img_batch_size = 20
  11. train_generator = img_datagen.flow_from_directory(train_dir,
  12.                                                   target_size=img_size,
  13.                                                   batch_size=img_batch_size,
  14.                                                   class_mode='binary')
  15. validation_generator = img_datagen.flow_from_directory(validation_dir,
  16.                                                        target_size=img_size,
  17.                                                        batch_size=img_batch_size,
  18.                                                        class_mode='binary')
  19. # Check shape
  20. for data_batch, label_batch in train_generator:
  21.     data_batch      # shape -> (20, 150, 150, 3) -> 1350000 bytes -> 1.35 MB
  22.     label_batch     # shape -> (20, )
  23.     break
  24.  
  25. # Train model
  26. history = model.fit(train_gen,
  27.                     steps_per_epoch=step_info['train'], # 100
  28.                     epochs=100,
  29.                     validation_data=valid_gen,
  30.                     validation_steps=step_info['validation'], # 50
  31.                     workers=4
  32.                     )
  33.  
  34. # model.summary() output:
  35. #
  36.  
  37. # Model: "sequential"
  38. # _________________________________________________________________
  39. # Layer (type)                 Output Shape              Param #  
  40. # =================================================================
  41. # conv2d (Conv2D)              (None, 148, 148, 32)      896      
  42. # _________________________________________________________________
  43. # max_pooling2d (MaxPooling2D) (None, 74, 74, 32)        0        
  44. # _________________________________________________________________
  45. # conv2d_1 (Conv2D)            (None, 72, 72, 64)        18496    
  46. # _________________________________________________________________
  47. # max_pooling2d_1 (MaxPooling2 (None, 36, 36, 64)        0        
  48. # _________________________________________________________________
  49. # conv2d_2 (Conv2D)            (None, 34, 34, 128)       73856    
  50. # _________________________________________________________________
  51. # max_pooling2d_2 (MaxPooling2 (None, 17, 17, 128)       0        
  52. # _________________________________________________________________
  53. # conv2d_3 (Conv2D)            (None, 15, 15, 128)       147584    
  54. # _________________________________________________________________
  55. # max_pooling2d_3 (MaxPooling2 (None, 7, 7, 128)         0        
  56. # _________________________________________________________________
  57. # flatten (Flatten)            (None, 6272)              0        
  58. # _________________________________________________________________
  59. # dropout (Dropout)            (None, 6272)              0        
  60. # _________________________________________________________________
  61. # dense (Dense)                (None, 512)               3211776  
  62. # _________________________________________________________________
  63. # dense_1 (Dense)              (None, 1)                 513      
  64. # =================================================================
  65. # Total params: 3,453,121
  66. # Trainable params: 3,453,121
  67. # Non-trainable params: 0
  68.  
# The saved model is roughly 27 MB.

# PyCharm raises OutOfMemory even with 12 GB allocated.
# In Jupyter Notebook the browser crashes during training — out of memory. Error in CMD:
#
  74. #  W tensorflow/core/framework/op_kernel.cc:1643] Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  75. # Traceback (most recent call last):
  76. #
  77. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
  78. #     ret = func(*args)
  79. #
  80. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
  81. #     values = next(generator_state.get_iterator(iterator_id))
  82. #
  83. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
  84. #     yield x[i]
  85. #
  86. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
  87. #     return self._get_batches_of_transformed_samples(index_array)
  88. #
  89. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
  90. #     batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
  91. #
  92. # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  93. #
  94. #
  95. # 2020-10-28 21:11:02.559451: W tensorflow/core/common_runtime/base_collective_executor.cc:217] BaseCollectiveExecutor::StartAbort Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  96. # Traceback (most recent call last):
  97. #
  98. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
  99. #     ret = func(*args)
  100. #
  101. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
  102. #     values = next(generator_state.get_iterator(iterator_id))
  103. #
  104. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
  105. #     yield x[i]
  106. #
  107. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
  108. #     return self._get_batches_of_transformed_samples(index_array)
  109. #
  110. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
  111. #     batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
  112. #
  113. # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  114. #
  115. #
  116. #          [[{{node PyFunc}}]]
  117. # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
  118. #
  119. #          [[IteratorGetNext]]
  120. # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
  121. #
  122. #          [[Shape_15/_12]]
  123. # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
  124. #
  125. # 2020-10-28 21:11:02.559983: W tensorflow/core/common_runtime/base_collective_executor.cc:217] BaseCollectiveExecutor::StartAbort Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  126. # Traceback (most recent call last):
  127. #
  128. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
  129. #     ret = func(*args)
  130. #
  131. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
  132. #     values = next(generator_state.get_iterator(iterator_id))
  133. #
  134. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
  135. #     yield x[i]
  136. #
  137. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
  138. #     return self._get_batches_of_transformed_samples(index_array)
  139. #
  140. #   File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
  141. #     batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
  142. #
  143. # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
  144. #
  145. #
  146. #          [[{{node PyFunc}}]]
  147. # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
  148. #
  149. #          [[IteratorGetNext]]
  150. # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
  151.  
  152.  
  153.  
# "Out of memory" in the browser while Task Manager shows only 8 GB of 16 in use.

# After every run I do:
  157.  
  158. from keras import backend as K
  159. import gc
  160.  
  161. K.clear_session()
  162. gc.collect()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement