Lissanro

RuntimeError: expected dtype Float but got dtype Half

Jun 5th, 2020
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-14-243bd3d406ab> in <module>
----> 1 learner.fine_tune(10)

/usr/local/lib/python3.8/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
    429         init_args.update(log)
    430         setattr(inst, 'init_args', init_args)
--> 431         return inst if to_return else f(*args, **kwargs)
    432     return _f
    433

/usr/local/lib/python3.8/dist-packages/fastai2/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    146     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    147     self.freeze()
--> 148     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    149     base_lr /= 2
    150     self.unfreeze()

/usr/local/lib/python3.8/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
    429         init_args.update(log)
    430         setattr(inst, 'init_args', init_args)
--> 431         return inst if to_return else f(*args, **kwargs)
    432     return _f
    433

/usr/local/lib/python3.8/dist-packages/fastai2/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
     98     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
     99               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 100     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    101
    102 # Cell

/usr/local/lib/python3.8/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
    429         init_args.update(log)
    430         setattr(inst, 'init_args', init_args)
--> 431         return inst if to_return else f(*args, **kwargs)
    432     return _f
    433

/usr/local/lib/python3.8/dist-packages/fastai2/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    201             try:
    202                 self.epoch=epoch; self('begin_epoch')
--> 203                 self._do_epoch_train()
    204                 self._do_epoch_validate()
    205             except CancelEpochException: self('after_cancel_epoch')

/usr/local/lib/python3.8/dist-packages/fastai2/learner.py in _do_epoch_train(self)
    173         try:
    174             self.dl = self.dls.train; self('begin_train')
--> 175             self.all_batches()
    176         except CancelTrainException: self('after_cancel_train')
    177         finally: self('after_train')

/usr/local/lib/python3.8/dist-packages/fastai2/learner.py in all_batches(self)
    151     def all_batches(self):
    152         self.n_iter = len(self.dl)
--> 153         for o in enumerate(self.dl): self.one_batch(*o)
    154
    155     def one_batch(self, i, b):

/usr/local/lib/python3.8/dist-packages/fastai2/learner.py in one_batch(self, i, b)
    157         try:
    158             self._split(b); self('begin_batch')
--> 159             self.pred = self.model(*self.xb); self('after_pred')
    160             if len(self.yb) == 0: return
    161             self.loss = self.loss_func(self.pred, *self.yb); self('after_loss')

/usr/local/lib/python3.8/dist-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    548             result = self._slow_forward(*input, **kwargs)
    549         else:
--> 550             result = self.forward(*input, **kwargs)
    551         for hook in self._forward_hooks.values():
    552             hook_result = hook(self, input, result)

/usr/local/lib/python3.8/dist-packages/efficientnet_pytorch/model.py in forward(self, inputs)
    191         bs = inputs.size(0)
    192         # Convolution layers
--> 193         x = self.extract_features(inputs)
    194
    195         # Pooling and final linear layer

/usr/local/lib/python3.8/dist-packages/efficientnet_pytorch/model.py in extract_features(self, inputs)
    180             if drop_connect_rate:
    181                 drop_connect_rate *= float(idx) / len(self._blocks)
--> 182             x = block(x, drop_connect_rate=drop_connect_rate)
    183
    184         # Head

/usr/local/lib/python3.8/dist-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    550             result = self.forward(*input, **kwargs)
    551         for hook in self._forward_hooks.values():
--> 552             hook_result = hook(self, input, result)
    553             if hook_result is not None:
    554                 result = hook_result

~/Jupyter/manifold_mixup.py in hook_mixup(self, module, input, output)
    140         if not self.mixup_has_been_applied: # performs mixup
    141             output_dims = len(output.size())
--> 142             output = torch.lerp(output[self.shuffle], output, weight=unsqueeze(self.lam, n=output_dims-1))
    143             self.mixup_has_been_applied = True
    144             return output

RuntimeError: expected dtype Float but got dtype Half
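
What's failing: the manifold-mixup forward hook calls torch.lerp with the block's output as input and self.lam as the tensor weight. If the learner is training in mixed precision (e.g. fastai2's to_fp16()), the activations arrive as torch.half while self.lam presumably stays torch.float32, and torch.lerp requires a tensor weight whose dtype matches its inputs, hence the mismatch. Below is a minimal sketch of the problem and one possible fix (casting the weight to the activations' dtype); the shapes and the names bs, features, shuffle and lam are illustrative, not taken from manifold_mixup.py.

import torch

# Sketch of the dtype mismatch, assuming fp16 training: activations are
# torch.half while the mixup weight stays torch.float32. Shapes are made up.
bs, features = 4, 8
output = torch.randn(bs, features).half()   # block output under fp16
shuffle = torch.randperm(bs)                # mixup permutation of the batch
lam = torch.rand(bs).view(bs, 1)            # per-sample mixup weights, float32

try:
    # On the PyTorch build in the trace above this raises
    # "RuntimeError: expected dtype Float but got dtype Half";
    # newer releases may type-promote instead of raising.
    torch.lerp(output[shuffle], output, weight=lam)
except RuntimeError as e:
    print(e)

# One possible fix: cast the weight to the activations' dtype before lerp.
mixed = torch.lerp(output[shuffle], output, weight=lam.to(output.dtype))
print(mixed.dtype)  # torch.float16

Applied to the hook itself, the same idea would mean casting before the unsqueeze helper, e.g. weight=unsqueeze(self.lam.to(output.dtype), n=output_dims-1), so the lerp runs entirely in half precision.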