from safetensors.torch import load_file, save_file
from transformers import AutoTokenizer, AutoModel
from diffusers import StableDiffusionPipeline
import torch
# Paths to your LoRA files
file1_path = "hansika motwaniF1.safetensors"
file2_path = "lora.safetensors"
file3_path = "NSFW_master.safetensors"
merged_file_path = "merged_lora.safetensors"
def load_tensors(file_path):
    return load_file(file_path)


try:
    tensors1 = load_tensors(file1_path)
    tensors2 = load_tensors(file2_path)
    tensors3 = load_tensors(file3_path)

    # Merge tensors (dict union: any key present in more than one file is
    # overwritten by the later file, in the order file1 < file2 < file3)
    merged_tensors = {**tensors1, **tensors2, **tensors3}

    # Save merged tensors
    save_file(merged_tensors, merged_file_path)
    print(f"Merged file saved at: {merged_file_path}")

    # Validate the merged file by reloading it and listing its keys
    merged_load = load_file(merged_file_path)
    print(merged_load.keys())

    try:
        # Load the tokenizer, model, and pipeline from the Hub
        tokenizer = AutoTokenizer.from_pretrained("Tjay143/HijabGirls")
        model = AutoModel.from_pretrained("Tjay143/HijabGirls", from_tf=False, use_safetensors=True)
        pipeline = StableDiffusionPipeline.from_pretrained("Tjay143/HijabGirls", torch_dtype=torch.float16)
        print("Model loaded successfully!")
    except Exception as e:
        print(f"Error: {e}")
except Exception as e:
    print(f"An error occurred: {e}")