import transformers
model_name = "/dataset/Llama-4-Scout-17B-16E-Instruct"
config = transformers.Llama4Config.from_pretrained(model_name)
config.vision_config.num_hidden_layers = 1 # Reduce layers for testing
config.text_config.num_hidden_layers = 1
model = transformers.Llama4ForConditionalGeneration(config)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/xinhe/miniforge3/lib/python3.12/site-packages/transformers/models/llama4/modeling_llama4.py", line 1190, in __init__
self.pad_token_id = self.config.pad_token_id if self.config.pad_token_id is not None else -1
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/xinhe/miniforge3/lib/python3.12/site-packages/transformers/configuration_utils.py", line 164, in __getattribute__
return super().__getattribute__(key)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'Llama4Config' object has no attribute 'pad_token_id'
The demo code should work well.
System Info
transformers == 5.0.0
Who can help?
@zucchini-nlp
Information
Tasks
examplesfolder (such as GLUE/SQuAD, ...)Reproduction
To reproduce:
Expected behavior
The demo code should work well.