Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

import streamlit as st  # Streamlit library for creating the web UI
from openai import OpenAI  # OpenAI API client

# Instantiate the OpenAI client with the API key read from the environment
# (OPENAI_API_KEY must be set; a missing key raises KeyError at startup,
# which is preferable to failing later on the first API call).
client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
# Define a function for running a conversation with OpenAI
def run_prompt_through_openai(system_prompt, user_prompt, model='gpt-3.5-turbo',
                              temperature=0.9, top_p=1):
    """Send a system/user prompt pair to the OpenAI chat API and return the reply text.

    Parameters
    ----------
    system_prompt : str
        Instruction message that frames the assistant's behavior.
    user_prompt : str
        The user's message for this turn.
    model : str, optional
        Chat model to use (default 'gpt-3.5-turbo').
    temperature : float, optional
        Sampling temperature (default 0.9 — fairly creative output).
    top_p : float, optional
        Nucleus-sampling cutoff (default 1 — no truncation).

    Returns
    -------
    str
        The content of the first choice in the API response.
    """
    # Use the module-level OpenAI client to create a chat completion
    chat_completion = client.chat.completions.create(
        model=model,
        messages=[
            {'role': 'system', 'content': system_prompt},  # frames the assistant
            {'role': 'user', 'content': user_prompt},      # the user's request
        ],
        temperature=temperature,
        top_p=top_p,
    )
    # The API returns a list of choices; extract the first one's message text.
    return chat_completion.choices[0].message.content
# Set a title and description for the Streamlit app
st.title("Single Input Example")
st.write("This is an example of a single input field for a conversation with the AI.")

# Fixed system prompt that frames the assistant's role (a plain constant,
# not a user-editable widget)
system_prompt = "You are a Twitter bot that helps people with their tweets"

# Text input field for the user's tweet description, with a default value
user_input = st.text_input("Description of a tweet you want", value="I need a tweet about GPT-4")

# Few-shot prompt template: three Input/Tweet examples, then a slot where the
# user's request is substituted ({user_input} is filled in via str.format below)
user_prompt = '''Input: I need a tweet about GPT-4
Tweet: "Wow! I just read about GPT-4 and it's amazing! I can't wait to see what it can do! #gpt4 #ai #machinelearning"
Input: Dogs in the summer and avoiding fleas and ticks
Tweet: "I love my dog, but I hate the fleas and ticks that come with him. I'm going to try to avoid them this summer."
Input: San Francisco's Golden Gate Bridge
Tweet: "I love the Golden Gate Bridge. It's a beautiful sight to see. I can't wait to go back to San Francisco."
Input: {user_input}
Tweet:'''

# Button that triggers the AI conversation
if st.button("Run"):
    # Substitute the user's text into the few-shot template
    user_prompt = user_prompt.format(user_input=user_input)
    # Run the user and system prompts through the chosen AI model
    response = run_prompt_through_openai(system_prompt, user_prompt)

    # Display the prompts and the AI's response in the app as code blocks
    st.markdown(f"```\n# System Prompt\n---\n{system_prompt}\n```")
    st.markdown(f"```\n# User Prompt\n---\n{user_prompt}\n```")
    st.markdown(f"```\n# AI Response\n---\n{response}\n```")