# MarianCG-CoNaLa / app.py
# Gradio demo app for the MarianCG code-generation model (AhmedSSoliman/MarianCG-CoNaLa).
import torch
import transformers
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr
# Load the pretrained MarianCG tokenizer and seq2seq model from the Hugging Face Hub.
# NOTE(review): this runs at import time and downloads weights on first use —
# the app cannot start without network access or a local cache.
tokenizer = AutoTokenizer.from_pretrained("AhmedSSoliman/MarianCG-CoNaLa")
model = AutoModelForSeq2SeqLM.from_pretrained("AhmedSSoliman/MarianCG-CoNaLa")
def generate_code(NL):
    """Translate a natural-language description into a code snippet.

    Args:
        NL: Natural-language intent, e.g. "sort a list in reverse order".

    Returns:
        The generated code as a plain string (special tokens stripped).
    """
    # Tokenize; inputs longer than 512 tokens are truncated, shorter ones
    # padded to max_length (matches the model's training configuration).
    inputs = tokenizer(
        NL,
        padding="max_length",
        truncation=True,
        max_length=512,
        return_tensors="pt",
    )
    # Inference only — disable autograd to avoid tracking gradients
    # and reduce memory use.
    with torch.no_grad():
        outputs = model.generate(
            inputs.input_ids,
            attention_mask=inputs.attention_mask,
        )
    # generate() returns a batch; we sent one prompt, so decode element 0.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Build and serve the Gradio UI: a single text box in, generated code out.
# (Pass share=True to launch() to get a public gradio.live URL.)
iface = gr.Interface(
    fn=generate_code,
    inputs="text",
    outputs="text",
    title="MarianCG model for Code Generation",
    description="MarianCG model for Code Generation",
)
iface.launch()