Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- ---------------------------------------------------------------------------
- ValueError Traceback (most recent call last)
- Cell In [17], line 1
- ----> 1 learn.fit(10, lr=0.1)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:256, in Learner.fit(self, n_epoch, lr, wd, cbs, reset_opt, start_epoch)
- 254 self.opt.set_hypers(lr=self.lr if lr is None else lr)
- 255 self.n_epoch = n_epoch
- --> 256 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:245, in Learner._do_fit(self)
- 243 for epoch in range(self.n_epoch):
- 244 self.epoch=epoch
- --> 245 self._with_events(self._do_epoch, 'epoch', CancelEpochException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:239, in Learner._do_epoch(self)
- 238 def _do_epoch(self):
- --> 239 self._do_epoch_train()
- 240 self._do_epoch_validate()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:231, in Learner._do_epoch_train(self)
- 229 def _do_epoch_train(self):
- 230 self.dl = self.dls.train
- --> 231 self._with_events(self.all_batches, 'train', CancelTrainException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:199, in Learner.all_batches(self)
- 197 def all_batches(self):
- 198 self.n_iter = len(self.dl)
- --> 199 for o in enumerate(self.dl): self.one_batch(*o)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:227, in Learner.one_batch(self, i, b)
- 225 b = self._set_device(b)
- 226 self._split(b)
- --> 227 self._with_events(self._do_one_batch, 'batch', CancelBatchException)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
- 192 def _with_events(self, f, event_type, ex, final=noop):
- --> 193 try: self(f'before_{event_type}'); f()
- 194 except ex: self(f'after_cancel_{event_type}')
- 195 self(f'after_{event_type}'); final()
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\learner.py:208, in Learner._do_one_batch(self)
- 206 self('after_pred')
- 207 if len(self.yb):
- --> 208 self.loss_grad = self.loss_func(self.pred, *self.yb)
- 209 self.loss = self.loss_grad.clone()
- 210 self('after_loss')
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\losses.py:54, in BaseLoss.__call__(self, inp, targ, **kwargs)
- 52 if targ.dtype in [torch.int8, torch.int16, torch.int32]: targ = targ.long()
- 53 if self.flatten: inp = inp.view(-1,inp.shape[-1]) if self.is_2d else inp.view(-1)
- ---> 54 return self.func.__call__(inp, targ.view(-1) if self.flatten else targ, **kwargs)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\modules\module.py:1130, in Module._call_impl(self, *input, **kwargs)
- 1126 # If we don't have any hooks, we want to skip the rest of the logic in
- 1127 # this function, and just call forward.
- 1128 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
- 1129 or _global_forward_hooks or _global_forward_pre_hooks):
- -> 1130 return forward_call(*input, **kwargs)
- 1131 # Do not call functions when jit is used
- 1132 full_backward_hooks, non_full_backward_hooks = [], []
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\modules\loss.py:1164, in CrossEntropyLoss.forward(self, input, target)
- 1163 def forward(self, input: Tensor, target: Tensor) -> Tensor:
- -> 1164 return F.cross_entropy(input, target, weight=self.weight,
- 1165 ignore_index=self.ignore_index, reduction=self.reduction,
- 1166 label_smoothing=self.label_smoothing)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\functional.py:3000, in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction, label_smoothing)
- 2934 r"""This criterion computes the cross entropy loss between input and target.
- 2935
- 2936 See :class:`~torch.nn.CrossEntropyLoss` for details.
- (...)
- 2997 >>> loss.backward()
- 2998 """
- 2999 if has_torch_function_variadic(input, target, weight):
- -> 3000 return handle_torch_function(
- 3001 cross_entropy,
- 3002 (input, target, weight),
- 3003 input,
- 3004 target,
- 3005 weight=weight,
- 3006 size_average=size_average,
- 3007 ignore_index=ignore_index,
- 3008 reduce=reduce,
- 3009 reduction=reduction,
- 3010 label_smoothing=label_smoothing,
- 3011 )
- 3012 if size_average is not None or reduce is not None:
- 3013 reduction = _Reduction.legacy_get_string(size_average, reduce)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\overrides.py:1498, in handle_torch_function(public_api, relevant_args, *args, **kwargs)
- 1492 warnings.warn("Defining your `__torch_function__ as a plain method is deprecated and "
- 1493 "will be an error in future, please define it as a classmethod.",
- 1494 DeprecationWarning)
- 1496 # Use `public_api` instead of `implementation` so __torch_function__
- 1497 # implementations can do equality/identity comparisons.
- -> 1498 result = torch_func_method(public_api, types, args, kwargs)
- 1500 if result is not NotImplemented:
- 1501 return result
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\fastai\torch_core.py:376, in TensorBase.__torch_function__(cls, func, types, args, kwargs)
- 374 if cls.debug and func.__name__ not in ('__str__','__repr__'): print(func, types, args, kwargs)
- 375 if _torch_handled(args, cls._opt, func): types = (torch.Tensor,)
- --> 376 res = super().__torch_function__(func, types, args, ifnone(kwargs, {}))
- 377 dict_objs = _find_args(args) if args else _find_args(list(kwargs.values()))
- 378 if issubclass(type(res),TensorBase) and dict_objs: res.set_meta(dict_objs[0],as_copy=True)
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\_tensor.py:1121, in Tensor.__torch_function__(cls, func, types, args, kwargs)
- 1118 return NotImplemented
- 1120 with _C.DisableTorchFunction():
- -> 1121 ret = func(*args, **kwargs)
- 1122 if func in get_default_nowrap_functions():
- 1123 return ret
- File c:\Users\teamt\miniconda3\envs\fastai\lib\site-packages\torch\nn\functional.py:3014, in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction, label_smoothing)
- 3012 if size_average is not None or reduce is not None:
- 3013 reduction = _Reduction.legacy_get_string(size_average, reduce)
- -> 3014 return torch._C._nn.cross_entropy_loss(input, target, weight, _Reduction.get_enum(reduction), ignore_index, label_smoothing)
- ValueError: Expected input batch_size (0) to match target batch_size (8).
Advertisement
Add Comment
Please sign in to add a comment
Advertisement