File size: 651 Bytes
1f780ee
 
 
e8695e0
1f780ee
e8695e0
 
1f780ee
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
import gradio as gr
from transformers import pipeline
import torch
import os

# Load the text-generation pipeline once at import time so every request
# reuses the same model instance.
# NOTE(review): the original read os.environ['gplsi_models'], which raises
# KeyError when the variable is unset; .get() yields None (no auth token)
# so the app can still start — confirm the model is reachable without one.
pipe = pipeline(
    "text-generation",
    model="gplsi/Aitana-6.3B",
    torch_dtype=torch.bfloat16,  # half-precision weights to cut memory use
    device_map="auto",           # let accelerate place layers on available devices
    token=os.environ.get("gplsi_models"),  # Hugging Face access token, if provided
)

def predict(input_text: str) -> str:
    """Generate a short continuation of *input_text* with the Aitana-6.3B model.

    Parameters
    ----------
    input_text : str
        Prompt text to continue.

    Returns
    -------
    str
        The generated text, suitable for Gradio's ``'text'`` output component.
    """
    generation = pipe(
        input_text,
        max_new_tokens=50,
        repetition_penalty=1.2,
        top_k=50,
        top_p=0.95,
        do_sample=True,
        temperature=0.5,
        early_stopping=True,  # only meaningful together with num_beams > 1
        num_beams=2,
    )
    # The pipeline returns a list of dicts like [{"generated_text": ...}].
    # The original returned the whole dict, which Gradio would render as its
    # repr; extract the string instead.
    return generation[0]["generated_text"]

# Wire the predictor into a minimal Gradio UI: one text box in, one text box out.
gradio_app = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="Aitana-6.3B Text Generation",
)

if __name__ == "__main__":
    # Start the local Gradio server only when run as a script,
    # not when this module is imported.
    gradio_app.launch()