# Copyright 2024 the LlamaFactory team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from types import MethodType
from typing import TYPE_CHECKING, Dict, Optional

from transformers import Trainer

from ...extras.logging import get_logger
from ..trainer_utils import convert_pissa_adapter, create_custom_optimzer, create_custom_scheduler


if TYPE_CHECKING:
    import torch
    from transformers import ProcessorMixin

    from ...hparams import FinetuningArguments


logger = get_logger(__name__)


class CustomTrainer(Trainer):
    r"""
    Inherits Trainer to support custom optimizers and learning rate schedulers.
    """

    def __init__(
        self, finetuning_args: "FinetuningArguments", processor: Optional["ProcessorMixin"], **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.finetuning_args = finetuning_args
        self.processor = processor

        if finetuning_args.pissa_convert:
            # Save the initial PiSSA adapter so it can be converted into a
            # standard LoRA adapter after training (see _save below).
            self.save_model(os.path.join(self.args.output_dir, "pissa_init"))

        if finetuning_args.use_badam:
            from badam import clip_grad_norm_for_sparse_tensor

            # BAdam only updates a subset of parameters, so patch the accelerator's
            # gradient clipping to handle sparse gradients.
            self.accelerator.clip_grad_norm_ = MethodType(clip_grad_norm_for_sparse_tensor, self.accelerator)

    def create_optimizer(self) -> "torch.optim.Optimizer":
        if self.optimizer is None:
            # Build the custom optimizer configured via finetuning_args (e.g. GaLore,
            # LoRA+ or BAdam); if none is requested, the default Trainer optimizer
            # is created by the super() call below.
            self.optimizer = create_custom_optimzer(self.model, self.args, self.finetuning_args)

        return super().create_optimizer()

    def create_scheduler(
        self, num_training_steps: int, optimizer: Optional["torch.optim.Optimizer"] = None
    ) -> "torch.optim.lr_scheduler.LRScheduler":
        create_custom_scheduler(self.args, num_training_steps, optimizer)
        return super().create_scheduler(num_training_steps, optimizer)

    def _save(self, output_dir: Optional[str] = None, state_dict: Optional[Dict[str, "torch.Tensor"]] = None) -> None:
        super()._save(output_dir, state_dict)
        output_dir = output_dir if output_dir is not None else self.args.output_dir

        if self.finetuning_args.pissa_convert:
            # Convert the saved PiSSA adapter into a standard LoRA adapter.
            convert_pissa_adapter(output_dir, state_dict, self.accelerator, self.model, self.args)

        if self.processor is not None:
            # Also persist the image processor so multimodal checkpoints are complete.
            getattr(self.processor, "image_processor").save_pretrained(output_dir)
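

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module): a minimal
# example of how this trainer is typically constructed. The objects below
# (`model`, `tokenizer`, `training_args`, `finetuning_args`, `dataset`) are
# hypothetical placeholders assumed to be prepared by the caller.
#
#     trainer = CustomTrainer(
#         model=model,                      # a transformers PreTrainedModel
#         args=training_args,               # transformers.TrainingArguments
#         finetuning_args=finetuning_args,  # hparams.FinetuningArguments
#         processor=None,                   # pass a ProcessorMixin for multimodal models
#         tokenizer=tokenizer,
#         train_dataset=dataset,
#     )
#     trainer.train()
# ---------------------------------------------------------------------------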