# billythecook / app.py
import gradio as gr
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the PEFT adapter configuration, the 4-bit base model, and its tokenizer
config = PeftConfig.from_pretrained("Maouu/billythecook")
base_model = AutoModelForCausalLM.from_pretrained("unsloth/llama-3-8b-bnb-4bit")
tokenizer = AutoTokenizer.from_pretrained("unsloth/llama-3-8b-bnb-4bit")

# Attach the LoRA adapter weights to the base model
model = PeftModel.from_pretrained(base_model, "Maouu/billythecook", config=config)

def generate_text(prompt):
    # Tokenize the prompt and generate a completion with the adapted model
    inputs = tokenizer(prompt, return_tensors="pt")
    output = model.generate(**inputs, max_length=50, num_return_sequences=1)
    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
    return generated_text
iface = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="PEFT Text Generation")
iface.launch()