# Hugging Face Space by AdithyaSNair — app.py (revision 959b2c7, verified)
import gradio as gr
import transformers
import torch
# Load the model once at import time so it is shared across all requests.
# NOTE(review): this is a 70B-parameter model — it requires multi-GPU or
# very large-memory hardware to load at bfloat16.
model_id = "aaditya/OpenBioLLM-Llama3-70B"
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    # BUG FIX: `device` accepts an int / torch.device, not "auto".
    # Automatic placement/sharding across available devices is requested
    # with `device_map="auto"` (handled by accelerate).
    device_map="auto",
)
# Define the function to generate text
def generate_text(query: str) -> str:
    """Generate an answer to a medical/healthcare question with OpenBioLLM.

    Parameters
    ----------
    query : str
        The user's question, inserted as the chat "user" turn.

    Returns
    -------
    str
        Only the newly generated answer text (the echoed prompt is
        stripped off the pipeline output).
    """
    messages = [
        {"role": "system", "content": "You are an expert and experienced from the healthcare and biomedical domain with extensive medical knowledge and practical experience. Your name is OpenBioLLM, and you were developed by Saama AI Labs. who's willing to help answer the user's query with explanation. In your explanation, leverage your deep medical expertise such as relevant anatomical structures, physiological processes, diagnostic criteria, treatment guidelines, or other pertinent medical concepts. Use precise medical terminology while still aiming to make the explanation clear and accessible to a general audience."},
        {"role": "user", "content": query},
    ]
    # Render the chat turns into a single prompt string using the model's
    # chat template; add_generation_prompt appends the assistant header so
    # the model continues as the assistant.
    prompt = pipeline.tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )
    # Llama-3 chat models end an assistant turn with the special
    # <|eot_id|> token, not only the plain EOS token — stop on either.
    # BUG FIX: the original called convert_tokens_to_ids("") — the empty
    # string is not a token and yields a bogus id.
    terminators = [
        pipeline.tokenizer.eos_token_id,
        pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
    ]
    outputs = pipeline(
        prompt,
        max_new_tokens=256,
        eos_token_id=terminators,
        # BUG FIX: do_sample=True with temperature=0.0 is invalid —
        # transformers raises ValueError for a non-positive temperature
        # when sampling. A small positive temperature keeps the output
        # near-greedy while remaining a valid sampling configuration.
        do_sample=True,
        temperature=0.1,
        top_p=0.9,
    )
    # The pipeline returns the prompt followed by the generation; slice
    # off the prompt so only the new answer is returned to the UI.
    return outputs[0]["generated_text"][len(prompt):]
# Wire the generation function into a simple text-in / text-out web UI.
interface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
    title="OpenBioLLM Medical Assistant",
    description="Ask any medical or healthcare-related question to OpenBioLLM, developed by Saama AI Labs.",
)

# Guard the launch so importing this module (e.g. in tests or by a WSGI
# wrapper) does not immediately start the web server.
if __name__ == "__main__":
    interface.launch()