import os
import json

import torch
from safetensors.torch import load_file, save_file

lora1_path = "./lora1"
lora2_path = "./lora2"
output_path = "./merged_lora"

alpha = 0.1  # weight of the first LoRA
beta = 0.9   # weight of the second LoRA

# Load the adapter weights
lora1 = load_file(os.path.join(lora1_path, "adapter_model.safetensors"))
lora2 = load_file(os.path.join(lora2_path, "adapter_model.safetensors"))

# Check that the keys match
if set(lora1.keys()) != set(lora2.keys()):
    raise ValueError("LoRA keys do not match between the adapters")

# Blend the weights linearly. Note: this mixes the low-rank A/B factor
# matrices directly, so the resulting weight delta is only an approximation
# of a weighted sum of the two adapters' effects.
merged = {}
for key in lora1:
    merged[key] = alpha * lora1[key] + beta * lora2[key]

# Create the output directory
os.makedirs(output_path, exist_ok=True)

# Save the merged adapter
save_file(merged, os.path.join(output_path, "adapter_model.safetensors"))

# Copy adapter_config.json (either of the two will do, assuming both
# adapters were trained with the same configuration)
with open(os.path.join(lora1_path, "adapter_config.json")) as f:
    config = json.load(f)
with open(os.path.join(output_path, "adapter_config.json"), "w") as f:
    json.dump(config, f, indent=2)

print("✅ LoRA successfully merged and saved to:", output_path)