runtime error

Exit code: 1. Reason:
  [frame header truncated in captured log; function name ends in "_loop" — likely Trainer._inner_training_loop]
    batch_samples, num_items_in_batch = self.get_batch_samples(epoch_iterator, num_batches, args.device)
  File "/usr/local/lib/python3.10/site-packages/transformers/trainer.py", line 5581, in get_batch_samples
    batch_samples.append(next(epoch_iterator))
  File "/usr/local/lib/python3.10/site-packages/accelerate/data_loader.py", line 567, in __iter__
    current_batch = next(dataloader_iter)
  File "/usr/local/lib/python3.10/site-packages/torch/utils/data/dataloader.py", line 734, in __next__
    data = self._next_data()
  File "/usr/local/lib/python3.10/site-packages/torch/utils/data/dataloader.py", line 790, in _next_data
    data = self._dataset_fetcher.fetch(index)  # may raise StopIteration
  File "/usr/local/lib/python3.10/site-packages/torch/utils/data/_utils/fetch.py", line 55, in fetch
    return self.collate_fn(data)
  File "/usr/local/lib/python3.10/site-packages/transformers/data/data_collator.py", line 272, in __call__
    batch = pad_without_fast_tokenizer_warning(
  File "/usr/local/lib/python3.10/site-packages/transformers/data/data_collator.py", line 67, in pad_without_fast_tokenizer_warning
    padded = tokenizer.pad(*pad_args, **pad_kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 3430, in pad
    return BatchEncoding(batch_outputs, tensor_type=return_tensors)
  File "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 240, in __init__
    self.convert_to_tensors(tensor_type=tensor_type, prepend_batch_axis=prepend_batch_axis)
  File "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 783, in convert_to_tensors
    raise ValueError(
ValueError: Unable to create tensor, you should probably activate truncation and/or padding with 'padding=True' 'truncation=True' to have batched tensors with the same length. Perhaps your features (`labels` in this case) have excessive nesting (inputs type `list` where type `int` is expected).
  0%|          | 0/125 [00:01<?, ?it/s]

Container logs:

Fetching error logs...