# OpenAI chat completion app built with Chainlit
import os

from openai import AsyncOpenAI  # async OpenAI client (openai >= 1.0 style)
import chainlit as cl  # Chainlit powers the chat UI
from chainlit.prompt import Prompt, PromptMessage  # Chainlit prompt tracking objects
from chainlit.playground.providers import ChatOpenAI  # OpenAI provider for the Prompt Playground
from dotenv import load_dotenv

# Load environment variables from a local .env file
load_dotenv()
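
# Assumption: the .env file defines OPENAI_API_KEY, which AsyncOpenAI reads from
# the environment. The app is typically launched with `chainlit run <this_file> -w`
# (file name left generic here). The chainlit.prompt / chainlit.playground imports
# above assume a Chainlit release that still ships those modules.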


system_template = """
You will embody Poppy, an AI chat assistant specializing in pediatric dentistry FAQs. Your interactions should exude warmth and engagement, offering support and information to users with a friendly touch. Keep your answers succinct, aiming for brevity with no more than two sentences per response. Should a question fall outside your expertise, kindly inform the user, 'I’m sorry, but I can only provide information related to pediatric dentistry.' For unrelated inquiries, maintain your role by stating, 'As Poppy, your pediatric dentistry assistant, I'm here to help with questions about children's dental care only.' Your primary goal is to inform and assist within the scope of pediatric dentistry, ensuring a helpful and positive user experience.
"""

user_template = """{input}
Think through your response step by step.
Be concise and funny in your response. I am a child.
"""


@cl.on_chat_start  # runs once when a new chat session starts
async def start_chat():
    # Model settings reused for every completion in this session
    settings = {
        "model": "gpt-3.5-turbo",
        "temperature": 0.07,
        "max_tokens": 250,
        "top_p": 1,
        "frequency_penalty": 0,
        "presence_penalty": 0,
    }

    cl.user_session.set("settings", settings)


@cl.on_message  # runs each time the user sends a message
async def main(message: cl.Message):
    settings = cl.user_session.get("settings")

    client = AsyncOpenAI()  # picks up OPENAI_API_KEY from the environment

    print(message.content)

    # Wrap the system and user messages in a Chainlit Prompt so the exchange
    # can be inspected and replayed in the Prompt Playground
    prompt = Prompt(
        provider=ChatOpenAI.id,
        messages=[
            PromptMessage(
                role="system",
                template=system_template,
                formatted=system_template,
            ),
            PromptMessage(
                role="user",
                template=user_template,
                formatted=user_template.format(input=message.content),
            ),
        ],
        inputs={"input": message.content},
        settings=settings,
    )

    print([m.to_openai() for m in prompt.messages])

    # Empty message that will be filled token by token as the stream arrives
    msg = cl.Message(content="")

    # Call OpenAI with streaming enabled and forward each token to the UI
    async for stream_resp in await client.chat.completions.create(
        messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
    ):
        token = stream_resp.choices[0].delta.content
        if not token:  # the final chunk's delta may carry no content
            token = ""
        await msg.stream_token(token)

    # Attach the finished completion to the prompt, and the prompt to the message
    prompt.completion = msg.content
    msg.prompt = prompt

    # Send the fully streamed message to the UI
    await msg.send()