-
Notifications
You must be signed in to change notification settings - Fork 32.5k
Description
Python3.8, torch-1.7.1, transformers-4.14.0.dev0
Errors in running example on https://huggingface.co/docs/transformers/model_doc/perceiver
from transformers import PerceiverTokenizer, PerceiverForMaskedLM
import torch
tokenizer = PerceiverTokenizer.from_pretrained('deepmind/language-perceiver')
model = PerceiverForMaskedLM.from_pretrained('deepmind/language-perceiver')
inputs = tokenizer("The capital of France is [MASK].", return_tensors="pt")
labels = tokenizer("The capital of France is Paris.", return_tensors="pt")["input_ids"]
outputs = model(**inputs, labels=labels)
loss = outputs.loss
logits = outputs.logitsTraceback (most recent call last):
File "", line 1, in
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
result = self.forward(*input, **kwargs)
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/transformers/models/perceiver/modeling_perceiver.py", line 950, in forward
masked_lm_loss = loss_fct(logits.view(-1, self.config.vocab_size), labels.view(-1))
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
result = self.forward(*input, **kwargs)
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/torch/nn/modules/loss.py", line 961, in forward
return F.cross_entropy(input, target, weight=self.weight,
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/torch/nn/functional.py", line 2468, in cross_entropy
return nll_loss(log_softmax(input, 1), target, weight, None, ignore_index, None, reduction)
File "/opt/software/install/miniconda38/lib/python3.8/site-packages/torch/nn/functional.py", line 2261, in nll_loss
raise ValueError('Expected input batch_size ({}) to match target batch_size ({}).'
ValueError: Expected input batch_size (2048) to match target batch_size (33).