import os

import torch
from safetensors.torch import load_file, save_file
from transformers import AutoTokenizer, AutoModel
from diffusers import StableDiffusionPipeline

# Input LoRA files and the output path for the merged result.
file1_path = "lora.TA_trained (2).safetensors"
file2_path = "my_first_flux_lora_v1.safetensors"
file3_path = "NSFW_master.safetensors"  # defined but not used in the merge below
merged_file_path = "merged_lora.safetensors"

# Blend weights applied to the first and second LoRA respectively.
weight1 = 0.8
weight2 = 0.2

|
def load_and_weight_tensors(file_path, weight):
    """Load a safetensors file and scale every tensor in it by `weight`."""
    tensors = load_file(file_path)
    weighted_tensors = {key: weight * tensor for key, tensor in tensors.items()}
    return weighted_tensors

|
try:
    tensors1 = load_and_weight_tensors(file1_path, weight1)
    tensors2 = load_and_weight_tensors(file2_path, weight2)

    # Merge the two weighted LoRAs: sum tensors that share a key,
    # copy keys that appear in only one file unchanged.
    merged_tensors = {**tensors1}
    for key in tensors2:
        if key in merged_tensors:
            merged_tensors[key] += tensors2[key]
        else:
            merged_tensors[key] = tensors2[key]

    save_file(merged_tensors, merged_file_path)
    print(f"Merged file with weights saved at: {merged_file_path}")

    # Quick sanity check: reload the merged file and list its keys.
    merged_load = load_file(merged_file_path)
    print("Keys in merged file:", merged_load.keys())

|
    # Try loading the base model/pipeline. The AutoTokenizer/AutoModel calls
    # assume the repo also ships transformers-compatible configs; any failure
    # is caught and reported below.
    try:
        tokenizer = AutoTokenizer.from_pretrained("Tjay143/Hijab2")
        model = AutoModel.from_pretrained("Tjay143/Hijab2", use_safetensors=True)
        pipeline = StableDiffusionPipeline.from_pretrained("Tjay143/Hijab2", torch_dtype=torch.float16)
        print("Pipeline loaded successfully!")
    except Exception as e:
        print(f"Error loading model/pipeline: {e}")
except Exception as e:
    print(f"An error occurred: {e}")