import torch
from safetensors.torch import load_file, save_file
import os
import json

lora1_path = "./lora1"
lora2_path = "./lora2"
output_path = "./merged_lora"
alpha = 0.1  # weight of the first LoRA
beta = 0.9   # weight of the second LoRA

# Load the weights
lora1 = load_file(os.path.join(lora1_path, "adapter_model.safetensors"))
lora2 = load_file(os.path.join(lora2_path, "adapter_model.safetensors"))

# Check that the keys match
if set(lora1.keys()) != set(lora2.keys()):
    raise ValueError("LoRA keys do not match between the adapters")

# Blend the weights
merged = {}
for key in lora1:
    merged[key] = alpha * lora1[key] + beta * lora2[key]

# Create the output directory
os.makedirs(output_path, exist_ok=True)

# Save the merged adapter
save_file(merged, os.path.join(output_path, "adapter_model.safetensors"))

# Copy adapter_config.json (either of the two will do)
with open(os.path.join(lora1_path, "adapter_config.json")) as f:
    config = json.load(f)

with open(os.path.join(output_path, "adapter_config.json"), "w") as f:
    json.dump(config, f, indent=2)

print("✅ LoRA successfully merged and saved to:", output_path)
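
This blend is a plain element-wise weighted sum over the adapter tensors, which assumes both adapters share the same rank and target modules (the key check above enforces matching keys). For reference, a minimal sketch of loading the merged adapter with PEFT; the base model path here is a placeholder assumption, not part of the original script:

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder: substitute the base model both LoRAs were trained on
base_model = AutoModelForCausalLM.from_pretrained("path/to/base-model")

# Attach the merged adapter produced by the script above
model = PeftModel.from_pretrained(base_model, "./merged_lora")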