diff --git a/tests/peft_integration/test_peft_integration.py b/tests/peft_integration/test_peft_integration.py
index 33880c88135d..dc30cbf96d4e 100644
--- a/tests/peft_integration/test_peft_integration.py
+++ b/tests/peft_integration/test_peft_integration.py
@@ -61,8 +61,6 @@ class PeftTesterMixin:
     transformers_test_model_classes = (AutoModelForCausalLM, OPTForCausalLM)
 
 
-# TODO: run it with CI after PEFT release.
-@slow
 class PeftIntegrationTester(unittest.TestCase, PeftTesterMixin):
     """
     A testing suite that makes sure that the PeftModel class is correctly integrated into the transformers library.
@@ -1003,7 +1001,7 @@ def test_mixtral_lora_conversion(self):
         if version.parse(importlib.metadata.version("peft")) < version.parse("0.19.0"):
             self.skipTest("For this test to pass, PEFT 0.19 is required.")
 
-        inputs = torch.arange(10).view(1, -1).to(0)
+        inputs = torch.arange(10).view(1, -1).to(torch_device)
         model_name = "hf-internal-testing/Mixtral-tiny"
         adapter_name = "peft-internal-testing/mixtral-pre-v5-lora"