TypeError: MPTForCausalLM.forward() got an unexpected keyword argument 'inputs_embeds'
I'm using PEFT with the Hugging Face Trainer to fine-tune the model, and I got the following error:
trainer.train()
0%| | 0/10 [20:45:22<?, ?it/s]
0%| | 0/10 [00:00<?, ?it/s]Traceback (most recent call last):
File "", line 1, in
File "/transformers/trainer.py", line 1537, in train
return inner_training_loop(
File "/transformers/trainer.py", line 1811, in _inner_training_loop
tr_loss_step = self.training_step(model, inputs)
File "/transformers/trainer.py", line 2632, in training_step
loss = self.compute_loss(model, inputs)
File "/transformers/trainer.py", line 2657, in compute_loss
outputs = model(**inputs)
File "/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/torch/nn/modules/module.py", line 1511, in _call_impl
return forward_call(*args, **kwargs)
File "/torch/nn/parallel/data_parallel.py", line 181, in forward
outputs = self.parallel_apply(replicas, inputs, module_kwargs)
File "/torch/nn/parallel/data_parallel.py", line 196, in parallel_apply
return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)])
File "/torch/nn/parallel/parallel_apply.py", line 110, in parallel_apply
output.reraise()
File "/torch/_utils.py", line 658, in reraise
raise exception
TypeError: Caught TypeError in replica 0 on device 0.
Original Traceback (most recent call last):
File "/torch/nn/parallel/parallel_apply.py", line 85, in _worker
output = module(*input, **kwargs)
File "/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/torch/nn/modules/module.py", line 1511, in _call_impl
return forward_call(*args, **kwargs)
File "/peft/peft_model.py", line 739, in forward
return self.base_model(
File "/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/torch/nn/modules/module.py", line 1511, in _call_impl
return forward_call(*args, **kwargs)
File "/peft/peft_model.py", line 739, in forward
return self.base_model(
File "/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/torch/nn/modules/module.py", line 1511, in _call_impl
return forward_call(*args, **kwargs)
File "/accelerate/hooks.py", line 165, in new_forward
output = old_forward(*args, **kwargs)
TypeError: MPTForCausalLM.forward() got an unexpected keyword argument 'inputs_embeds'
Versions:
transformers @ git+https://github.com/huggingface/transformers.git@70c79940957fb25b54bd1b106935c756b90345eb
torch==2.1.0.dev20230601
peft @ git+https://github.com/huggingface/peft.git@189a6b8e357ecda05ccde13999e4c35759596a67
It turns out that uninstalling PEFT and reinstalling it from the latest commit resolved the issue.