import os

import gradio as gr

from neon_llm_chatgpt.chatgpt import ChatGPT
from utils import convert_history

# Models offered by the demo; the first entry is used by default.
model_choices = ["gpt-3.5-turbo", "gpt-4"]

# The OpenAI API key must be provided via the OPENAI_API_KEY environment variable.
key = os.environ["OPENAI_API_KEY"]

config = {
    "key": key,
    "model": model_choices[0],
    "role": "You are trying to give a short answer in less than 40 words.",
    "context_depth": 3,  # how much chat history to include as context
    "max_tokens": 256,   # limit on the length of the generated reply
}
chatgpt = ChatGPT(config)


def ask(message, history, persona):
    """Gradio callback: forward the message, chat history, and persona to ChatGPT."""
    chat_history = convert_history(history)
    response = chatgpt.ask(message, chat_history, persona={"description": persona})
    return response


demo = gr.ChatInterface(
    ask,
    additional_inputs=[
        gr.Textbox(chatgpt.role, label="Persona"),
        # gr.Dropdown(choices=model_choices, value=model_choices[0], label="Model"),
    ],
)

if __name__ == "__main__":
    demo.queue().launch()
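
# --- Note on `utils.convert_history` (assumption, helper not shown above) ---
# Gradio's ChatInterface passes `history` as a list of [user_message, bot_message]
# pairs, while ChatGPT.ask() consumes a flat chat history of [role, text] entries.
# If the local utils module is unavailable, a minimal sketch of such a helper
# (the exact role labels "user"/"llm" are an assumption) could look like:
#
#     def convert_history(history):
#         chat_history = []
#         for user_message, bot_message in history:
#             chat_history.append(["user", user_message])
#             chat_history.append(["llm", bot_message])
#         return chat_history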