Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- ---------------------------------------------------------------------------
- RuntimeError Traceback (most recent call last)
- Cell In [19], line 1
- ----> 1 learn.fit(10, lr=0.1)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:256, in Learner.fit(self, n_epoch, lr, wd, cbs, reset_opt, start_epoch)
- 254 self.opt.set_hypers(lr=self.lr if lr is None else lr)
- 255 self.n_epoch = n_epoch
- --> 256 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:245, in Learner._do_fit(self)
- 243 for epoch in range(self.n_epoch):
- 244 self.epoch=epoch
- --> 245 self._with_events(self._do_epoch, 'epoch', CancelEpochException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:239, in Learner._do_epoch(self)
- 238 def _do_epoch(self):
- --> 239 self._do_epoch_train()
- 240 self._do_epoch_validate()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:231, in Learner._do_epoch_train(self)
- 229 def _do_epoch_train(self):
- 230 self.dl = self.dls.train
- --> 231 self._with_events(self.all_batches, 'train', CancelTrainException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:199, in Learner.all_batches(self)
- 197 def all_batches(self):
- 198 self.n_iter = len(self.dl)
- --> 199 for o in enumerate(self.dl): self.one_batch(*o)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:227, in Learner.one_batch(self, i, b)
- 225 b = self._set_device(b)
- 226 self._split(b)
- --> 227 self._with_events(self._do_one_batch, 'batch', CancelBatchException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:208, in Learner._do_one_batch(self)
- 206 self('after_pred')
- 207 if len(self.yb):
- --> 208 self.loss_grad = self.loss_func(self.pred, *self.yb)
- 209 self.loss = self.loss_grad.clone()
- 210 self('after_loss')
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\losses.py:71, in CrossEntropyLossFlat.__init__(self, axis, *args, **kwargs)
- 65 @use_kwargs_dict(keep=True, weight=None, ignore_index=-100, reduction='mean')
- 66 def __init__(self,
- 67 *args,
- 68 axis:int=-1, # Class axis
- 69 **kwargs
- 70 ):
- ---> 71 super().__init__(nn.CrossEntropyLoss, *args, axis=axis, **kwargs)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\losses.py:28, in BaseLoss.__init__(self, loss_cls, axis, flatten, floatify, is_2d, *args, **kwargs)
- 18 def __init__(self,
- 19 loss_cls, # Uninitialized PyTorch-compatible loss
- 20 *args,
- (...)
- 25 **kwargs
- 26 ):
- 27 store_attr("axis,flatten,floatify,is_2d")
- ---> 28 self.func = loss_cls(*args,**kwargs)
- 29 functools.update_wrapper(self, self.func)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\modules\loss.py:1159, in CrossEntropyLoss.__init__(self, weight, size_average, ignore_index, reduce, reduction, label_smoothing)
- 1157 def __init__(self, weight: Optional[Tensor] = None, size_average=None, ignore_index: int = -100,
- 1158 reduce=None, reduction: str = 'mean', label_smoothing: float = 0.0) -> None:
- -> 1159 super(CrossEntropyLoss, self).__init__(weight, size_average, reduce, reduction)
- 1160 self.ignore_index = ignore_index
- 1161 self.label_smoothing = label_smoothing
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\modules\loss.py:25, in _WeightedLoss.__init__(self, weight, size_average, reduce, reduction)
- 24 def __init__(self, weight: Optional[Tensor] = None, size_average=None, reduce=None, reduction: str = 'mean') -> None:
- ---> 25 super(_WeightedLoss, self).__init__(size_average, reduce, reduction)
- 26 self.register_buffer('weight', weight)
- 27 self.weight: Optional[Tensor]
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\modules\loss.py:18, in _Loss.__init__(self, size_average, reduce, reduction)
- 16 super(_Loss, self).__init__()
- 17 if size_average is not None or reduce is not None:
- ---> 18 self.reduction: str = _Reduction.legacy_get_string(size_average, reduce)
- 19 else:
- 20 self.reduction = reduction
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\_reduction.py:35, in legacy_get_string(size_average, reduce, emit_warning)
- 32 if reduce is None:
- 33 reduce = True
- ---> 35 if size_average and reduce:
- 36 ret = 'mean'
- 37 elif reduce:
- RuntimeError: Boolean value of Tensor with more than one value is ambiguous
-
- [Note: the frames above show the call entering CrossEntropyLossFlat.__init__ from
- `self.loss_func(self.pred, *self.yb)` — i.e. `loss_func` was set to the class
- `CrossEntropyLossFlat` instead of an instance. The prediction tensor is then taken
- as the `weight` argument and the target tensor as `size_average`, which is
- truth-tested (`if size_average and reduce:`), raising this error. Fix: pass an
- instantiated loss, e.g. `loss_func=CrossEntropyLossFlat()`.]
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement