pissa_init.py

# coding=utf-8
# Copyright 2024 HuggingFace Inc. and the LlamaFactory team.
#
# This code is based on HuggingFace's PEFT library.
# https://github.com/huggingface/peft/blob/v0.11.0/examples/pissa_finetuning/preprocess.py
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from typing import TYPE_CHECKING, Optional

import fire
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForCausalLM, AutoTokenizer


if TYPE_CHECKING:
    from transformers import PreTrainedModel


def quantize_pissa(
    model_name_or_path: str,
    output_dir: str,
    pissa_iter: int = 16,
    lora_alpha: Optional[int] = None,
    lora_rank: int = 16,
    lora_dropout: float = 0.0,
    lora_target: tuple = ("q_proj", "v_proj"),
    save_safetensors: bool = True,
):
  35. r"""
  36. Initializes LoRA weights with Principal Singular values and Singular vectors Adaptation (PiSSA)
  37. Usage: python pissa_init.py --model_name_or_path path_to_model --output_dir output_dir
  38. """
    if isinstance(lora_target, str):
        lora_target = [name.strip() for name in lora_target.split(",")]
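
    # Load the tokenizer and base model; torch_dtype="auto" keeps the checkpoint's native precision.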
    tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(model_name_or_path, trust_remote_code=True, torch_dtype="auto")
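
    # pissa_iter == -1 requests an exact (slower) SVD during initialization;
    # otherwise PEFT uses a fast randomized SVD with `pissa_iter` iterations.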
    lora_config = LoraConfig(
        task_type=TaskType.CAUSAL_LM,
        r=lora_rank,
        lora_alpha=lora_alpha if lora_alpha is not None else lora_rank * 2,
        lora_dropout=lora_dropout,
        target_modules=lora_target,
        init_lora_weights="pissa" if pissa_iter == -1 else "pissa_niter_{}".format(pissa_iter),
    )

    # Init PiSSA model: get_peft_model performs the SVD on every target module.
    peft_model = get_peft_model(model, lora_config)
    pissa_dir = os.path.join(output_dir, "pissa_init")

    # Save PiSSA model: point the adapter at the residual model saved below and
    # disable re-initialization so loading the adapter does not run PiSSA again.
    setattr(peft_model.peft_config["default"], "base_model_name_or_path", os.path.abspath(output_dir))
    setattr(peft_model.peft_config["default"], "init_lora_weights", True)  # don't apply pissa again
    peft_model.save_pretrained(pissa_dir, safe_serialization=save_safetensors)
    print("Adapter weights saved in {}".format(pissa_dir))
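
    # After PiSSA initialization the base checkpoint holds the residual weights:
    # the principal singular components have been moved into the adapter, and
    # unload() detaches the adapter without merging it back.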
    # Save base model
    base_model: "PreTrainedModel" = peft_model.unload()
    base_model.save_pretrained(output_dir, safe_serialization=save_safetensors)
    tokenizer.save_pretrained(output_dir)
    print("Model weights saved in {}".format(output_dir))

    # The keys below are LLaMA-Factory training arguments.
    print("- Fine-tune this model with:")
    print("model_name_or_path: {}".format(output_dir))
    print("adapter_name_or_path: {}".format(pissa_dir))
    print("finetuning_type: lora")
    print("pissa_init: false")
    print("pissa_convert: true")
    print("- and optionally with:")
    print("quantization_bit: 4")


if __name__ == "__main__":
    fire.Fire(quantize_pissa)
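

# Example invocation (a sketch only; the model name and paths below are
# placeholders, not part of the original script):
#
#   python pissa_init.py \
#       --model_name_or_path meta-llama/Llama-2-7b-hf \
#       --output_dir models/llama2-7b-pissa \
#       --lora_rank 16 \
#       --pissa_iter 16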