Advertisement
Not a member of Pastebin yet?
Sign Up — it unlocks many cool features!
# The folder with the images is ~= 90 MB
# Generators
from tensorflow.keras.preprocessing.image import ImageDataGenerator

img_size = (150, 150)

# Augmentation belongs to the TRAINING pipeline only.
img_datagen = ImageDataGenerator(rescale=1 / 255,
                                 rotation_range=40,
                                 width_shift_range=0.2,
                                 height_shift_range=0.2,
                                 shear_range=0.2,
                                 zoom_range=0.2,
                                 horizontal_flip=True)
# BUG FIX: validation images must only be rescaled, never augmented.
# The original fed validation through the augmenting generator, which
# evaluates the model on randomly distorted images and makes validation
# loss/accuracy noisy and not comparable across epochs.
val_datagen = ImageDataGenerator(rescale=1 / 255)

img_batch_size = 20

# NOTE(review): `train_dir` / `validation_dir` are defined outside this
# snippet — confirm they point at the intended directories.
train_generator = img_datagen.flow_from_directory(train_dir,
                                                  target_size=img_size,
                                                  batch_size=img_batch_size,
                                                  class_mode='binary')
validation_generator = val_datagen.flow_from_directory(validation_dir,
                                                       target_size=img_size,
                                                       batch_size=img_batch_size,
                                                       class_mode='binary')
# Check the shape of a single batch without iterating the whole epoch.
# NOTE: the original comment claimed 1,350,000 bytes -> 1.35 MB, but a
# (20, 150, 150, 3) float32 batch is 20*150*150*3*4 bytes ~= 5.15 MiB —
# exactly the allocation that fails in the MemoryError logged below.
data_batch, label_batch = next(train_generator)
# data_batch.shape  -> (20, 150, 150, 3)
# label_batch.shape -> (20,)
# Train model
# BUG FIX: the original passed `train_gen` / `valid_gen`, which are not
# defined anywhere in this file — the generators created above are
# `train_generator` and `validation_generator`; use those so the script
# is self-consistent.
# NOTE(review): `step_info` is defined elsewhere; presumably
# {'train': 100, 'validation': 50} per the inline comments — confirm.
history = model.fit(train_generator,
                    steps_per_epoch=step_info['train'],        # 100 batches/epoch * 20 imgs = 2000 imgs
                    epochs=100,
                    validation_data=validation_generator,
                    validation_steps=step_info['validation'],  # 50
                    workers=4                                  # 4 loader threads each queue batches in RAM;
                                                               # lower this if the MemoryError below persists
                    )
- # model.summary() output:
- #
- # Model: "sequential"
- # _________________________________________________________________
- # Layer (type) Output Shape Param #
- # =================================================================
- # conv2d (Conv2D) (None, 148, 148, 32) 896
- # _________________________________________________________________
- # max_pooling2d (MaxPooling2D) (None, 74, 74, 32) 0
- # _________________________________________________________________
- # conv2d_1 (Conv2D) (None, 72, 72, 64) 18496
- # _________________________________________________________________
- # max_pooling2d_1 (MaxPooling2 (None, 36, 36, 64) 0
- # _________________________________________________________________
- # conv2d_2 (Conv2D) (None, 34, 34, 128) 73856
- # _________________________________________________________________
- # max_pooling2d_2 (MaxPooling2 (None, 17, 17, 128) 0
- # _________________________________________________________________
- # conv2d_3 (Conv2D) (None, 15, 15, 128) 147584
- # _________________________________________________________________
- # max_pooling2d_3 (MaxPooling2 (None, 7, 7, 128) 0
- # _________________________________________________________________
- # flatten (Flatten) (None, 6272) 0
- # _________________________________________________________________
- # dropout (Dropout) (None, 6272) 0
- # _________________________________________________________________
- # dense (Dense) (None, 512) 3211776
- # _________________________________________________________________
- # dense_1 (Dense) (None, 1) 513
- # =================================================================
- # Total params: 3,453,121
- # Trainable params: 3,453,121
- # Non-trainable params: 0
- # The saved model is ~= 27 MB
- # PyCharm: OutOfMemory with 12 GB allocated,
- # Jupyter notebook: the browser crashes during training - out of memory. Error in CMD:
- #
- # W tensorflow/core/framework/op_kernel.cc:1643] Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- # Traceback (most recent call last):
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
- # ret = func(*args)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
- # values = next(generator_state.get_iterator(iterator_id))
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
- # yield x[i]
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
- # return self._get_batches_of_transformed_samples(index_array)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
- # batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
- #
- # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- #
- #
- # 2020-10-28 21:11:02.559451: W tensorflow/core/common_runtime/base_collective_executor.cc:217] BaseCollectiveExecutor::StartAbort Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- # Traceback (most recent call last):
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
- # ret = func(*args)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
- # values = next(generator_state.get_iterator(iterator_id))
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
- # yield x[i]
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
- # return self._get_batches_of_transformed_samples(index_array)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
- # batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
- #
- # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- #
- #
- # [[{{node PyFunc}}]]
- # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
- #
- # [[IteratorGetNext]]
- # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
- #
- # [[Shape_15/_12]]
- # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
- #
- # 2020-10-28 21:11:02.559983: W tensorflow/core/common_runtime/base_collective_executor.cc:217] BaseCollectiveExecutor::StartAbort Resource exhausted: MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- # Traceback (most recent call last):
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\ops\script_ops.py", line 236, in __call__
- # ret = func(*args)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\data\ops\dataset_ops.py", line 789, in generator_py_func
- # values = next(generator_state.get_iterator(iterator_id))
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\tensorflow_core\python\keras\engine\data_adapter.py", line 975, in generator_fn
- # yield x[i]
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 65, in __getitem__
- # return self._get_batches_of_transformed_samples(index_array)
- #
- # File "D:\Inne\DevelopmentEnviroments\Anaconda\envs\scientificProject\lib\site-packages\keras_preprocessing\image\iterator.py", line 222, in _get_batches_of_transformed_samples
- # batch_x = np.zeros((len(index_array),) + self.image_shape, dtype=self.dtype)
- #
- # MemoryError: Unable to allocate 5.15 MiB for an array with shape (20, 150, 150, 3) and data type float32
- #
- #
- # [[{{node PyFunc}}]]
- # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
- #
- # [[IteratorGetNext]]
- # Hint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.
- # Out of memory in the browser, while Task Manager shows only 8 GB / 16 GB used
- # After each run I do:
# Free Keras/TensorFlow state between runs.
# BUG FIX: the model above is built with `tensorflow.keras`, but this
# imported the backend from the standalone `keras` package. The two are
# separate libraries with separate global sessions/graphs, so
# `K.clear_session()` was clearing the wrong one and releasing nothing.
# Import the backend from tensorflow.keras to clear the graph this
# model actually lives in.
from tensorflow.keras import backend as K
import gc

K.clear_session()  # drop the global Keras graph/session state
gc.collect()       # force Python to release unreferenced objects now
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement