eagle0504 commited on
Commit
86ea792
1 Parent(s): 4f19546

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +51 -0
app.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from openai import OpenAI  # Nvidia's endpoint is OpenAI-compatible, so the OpenAI client works as-is
import streamlit as st

# Initialize the Nvidia client (OpenAI-compatible API surface).
client = OpenAI(
    base_url="https://integrate.api.nvidia.com/v1",  # Nvidia API endpoint
    api_key=st.secrets["NVIDIA_API_KEY"],  # Nvidia API key from Streamlit secrets
)

st.title("ChatGPT-like clone with Nvidia Model")

# Initialize session state on first run. Streamlit re-executes this script on
# every interaction, so persistent values must live in st.session_state.
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "nvidia/llama-3.1-nemotron-70b-instruct"

if "messages" not in st.session_state:
    # Seed the conversation with a system prompt so the model has instructions
    # from the first turn onward.
    st.session_state.messages = [
        {"role": "system", "content": "You are a helpful assistant."}
    ]

# Render the chat history on each rerun.
# NOTE(review): this also renders the "system" message as a chat bubble —
# confirm that is intended; most UIs hide the system prompt.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Get new user input; the walrus keeps the handler guarded on non-empty input.
if prompt := st.chat_input("What is up?"):
    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Stream the assistant's reply into a single, in-place-updated element.
    with st.chat_message("assistant"):
        # Create the completion request with the full conversation history so
        # the model keeps context across turns.
        stream = client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=st.session_state.messages,
            temperature=0.5,
            top_p=0.7,
            max_tokens=1024,
            stream=True,
        )
        # BUG FIX: st.write() has no `end` parameter (it is not print()), and
        # calling it per chunk would create a new element for every token.
        # Use an st.empty() placeholder and rewrite it with the accumulated
        # text so the reply streams into one message bubble.
        placeholder = st.empty()
        response = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta is not None:
                response += delta
                placeholder.markdown(response)

    # Persist the assistant response so it survives the next rerun.
    st.session_state.messages.append({"role": "assistant", "content": response})