Conversation
The docs for this PR live here. All of your documentation changes will be reflected on that endpoint. The docs are available until 30 days after the last update.
formatting_func=formatting_prompts_func,
)

def test_sft_trainer_with_model_num_train_epochs(self):

Test removed, not sure what it was testing.
self.assertIsNotNone(trainer.state.log_history[-1]["train_loss"])

def test_with_multiple_eval_datasets(self):

Moved to SFTTrainerTester2.
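For reference, the multi-eval path is plain Trainer behaviour: eval_dataset accepts a dict and metrics are logged per key. A minimal sketch, with a placeholder model id and toy dataset (both illustrative, not what the suite actually uses):

```python
from datasets import Dataset
from trl import SFTConfig, SFTTrainer

# Toy data standing in for the test fixtures.
train = Dataset.from_dict({"text": ["hello world"] * 8})
eval_a = Dataset.from_dict({"text": ["foo bar"] * 4})
eval_b = Dataset.from_dict({"text": ["baz qux"] * 4})

trainer = SFTTrainer(
    model="Qwen/Qwen2.5-0.5B",  # placeholder; any small causal LM id
    args=SFTConfig(output_dir="out", report_to="none", max_steps=2),
    train_dataset=train,
    # Dict-valued eval_dataset: metrics are logged once per key,
    # e.g. eval_data1_loss and eval_data2_loss.
    eval_dataset={"data1": eval_a, "data2": eval_b},
)
trainer.train()
metrics = trainer.evaluate()  # iterates over both entries
```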
result_text2 = tokenizer.decode(non_masked_tokens2)
self.assertEqual(result_text2, " I should not be masked. I should not be masked too.")

def test_with_model_neftune(self):

Removed, NEFTune support is now in transformers.
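For context on the removal: NEFTune was upstreamed, so it is now switched on through the training arguments rather than a TRL-specific code path. A minimal sketch, assuming a placeholder model and toy dataset:

```python
from datasets import Dataset
from trl import SFTConfig, SFTTrainer

train = Dataset.from_dict({"text": ["hello world"] * 8})
args = SFTConfig(
    output_dir="out",
    report_to="none",
    max_steps=2,
    neftune_noise_alpha=5.0,  # inherited from transformers.TrainingArguments
)
trainer = SFTTrainer(model="Qwen/Qwen2.5-0.5B", args=args, train_dataset=train)
trainer.train()
# transformers attaches the noise hook to the input embeddings only for the
# duration of train() and detaches it afterwards, which is exactly the
# property the deleted assertion checked:
assert len(trainer.model.get_input_embeddings()._forward_hooks) == 0
```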
self.assertEqual(len(trainer.model.get_input_embeddings()._forward_hooks), 0)

@require_peft
def test_peft_str(self):

Moved to SFTTrainerTester2.
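Guessing from the name that test_peft_str covered passing the model as a hub id string together with a peft_config; if so, the pattern being exercised is roughly this (all names are placeholders):

```python
from datasets import Dataset
from peft import LoraConfig
from trl import SFTConfig, SFTTrainer

train = Dataset.from_dict({"text": ["hello world"] * 8})
trainer = SFTTrainer(
    model="Qwen/Qwen2.5-0.5B",  # a string, not a model instance
    args=SFTConfig(output_dir="out", report_to="none", max_steps=2),
    train_dataset=train,
    # The trainer loads the checkpoint and applies the adapter itself.
    peft_config=LoraConfig(r=16, lora_alpha=32, task_type="CAUSAL_LM"),
)
trainer.train()
```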
)

@require_peft
def test_peft_sft_trainer(self):

Likely a duplicate of the above, removed.
self.assertIsNotNone(trainer.state.log_history[-1]["train_loss"])

@require_peft
def test_peft_and_gradient_checkpointing(self):

Moved to SFTTrainerTester2.
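Good that this one survives the move: PEFT plus gradient checkpointing is the combination that historically failed because the frozen embedding outputs stop requiring grads. A sketch of the setup being tested (placeholder model and data):

```python
from datasets import Dataset
from peft import LoraConfig
from trl import SFTConfig, SFTTrainer

train = Dataset.from_dict({"text": ["hello world"] * 8})
args = SFTConfig(
    output_dir="out",
    report_to="none",
    max_steps=2,
    gradient_checkpointing=True,  # the half that used to conflict with PEFT
)
trainer = SFTTrainer(
    model="Qwen/Qwen2.5-0.5B",
    args=args,
    train_dataset=train,
    peft_config=LoraConfig(r=16, lora_alpha=32, task_type="CAUSAL_LM"),
)
# Would raise a "does not require grad" error if input grads weren't
# re-enabled on the base model under the hood.
trainer.train()
```

TRL re-enables input-require-grads on the base model when both features are active, which is presumably the behaviour the moved test asserts.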
self.assertIsNotNone(trainer.state.log_history[-1]["train_loss"])

@require_peft
def test_peft_neftune(self):

Removed, NEFTune support is now part of transformers.
self.assertEqual(len(trainer.model.get_input_embeddings()._forward_hooks), 0)

@require_peft
def test_peft_tag(self):

Moved to SFTTrainerTester2.
self.assertIn(tag, trainer.model.model_tags)

@require_peft
def test_tag(self):

Moved to SFTTrainerTester2.
self.assertIsNotNone(trainer.state.log_history[-1]["train_loss"])

def test_torch_dtype(self):

Moved to SFTTrainerTester2.
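For reference, the dtype path goes through SFTConfig.model_init_kwargs, which is forwarded to from_pretrained when the model is passed as a string. A minimal sketch (placeholder model and data):

```python
import torch
from datasets import Dataset
from trl import SFTConfig, SFTTrainer

train = Dataset.from_dict({"text": ["hello world"] * 8})
args = SFTConfig(
    output_dir="out",
    report_to="none",
    model_init_kwargs={"torch_dtype": torch.bfloat16},  # forwarded to from_pretrained
)
trainer = SFTTrainer(model="Qwen/Qwen2.5-0.5B", args=args, train_dataset=train)
assert trainer.model.dtype == torch.bfloat16
```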
What does this PR do?

Fixes # (issue)

Before submitting

- Did you read the contributor guideline, Pull Request section?
- Was this discussed/approved via a GitHub issue? Please add a link to it if that's the case.

Who can review?

Anyone in the community is free to review the PR once the tests have passed. Feel free to tag members/contributors who may be interested in your PR.