# AWS Well-Architected Review — Streamlit app that generates architecture
# recommendations via NVIDIA's OpenAI-compatible inference API.
import streamlit as st
import os
from openai import OpenAI
# Initialize an OpenAI-compatible client pointed at NVIDIA's API endpoint.
# NOTE(review): the key is read from the environment (NVIDIA_API_KEY), not
# directly from st.secrets — on Streamlit Cloud, entries in secrets.toml are
# also exported as environment variables, so os.getenv covers both cases.
# If the variable is unset, api_key is None and the first completion request
# will fail with an authentication error.
client = OpenAI(
    base_url="https://integrate.api.nvidia.com/v1",
    api_key=os.getenv("NVIDIA_API_KEY"),
)
# --- Streamlit page layout --------------------------------------------------
st.title("AWS Well-Architected Review")
st.write("Get recommendations for optimizing your AWS architecture.")

# Seed the conversation exactly once per browser session. st.session_state
# persists across Streamlit's script reruns, so the system prompt (which
# frames every later completion around the 5 WAR pillars) is only added on
# the first load, not on every button click.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "system", "content": "You are an assistant that provides recommendations based on AWS Well-Architected Review best practices. Focus on the 5 pillars: Operational Excellence, Security, Reliability, Performance Efficiency, and Cost Optimization."}
    ]
# Free-form description of the user's AWS setup to be reviewed.
architecture_input = st.text_area("Describe your AWS architecture:")

# Submitting triggers one completion round-trip; the full message history is
# sent each time so follow-up submissions keep prior context.
if st.button("Get Recommendations"):
    if architecture_input:
        # Record the user's description before calling the API so the model
        # receives it as the latest turn in the conversation.
        st.session_state.messages.append({"role": "user", "content": architecture_input})
        with st.chat_message("assistant"):
            with st.spinner("Generating recommendations..."):
                # Streamed completion from NVIDIA's OpenAI-compatible API.
                stream = client.chat.completions.create(
                    model="nvidia/llama-3.1-nemotron-70b-instruct",  # Nvidia model name
                    messages=st.session_state.messages,  # full conversation history
                    temperature=0.5,
                    top_p=0.7,
                    max_tokens=1024,
                    stream=True,
                )
                # Accumulate streamed deltas. Guard against chunks that carry
                # an empty `choices` list (some providers emit keep-alive or
                # usage-only chunks), which would raise IndexError on [0].
                response_chunks = []
                for chunk in stream:
                    if chunk.choices and chunk.choices[0].delta.content is not None:
                        response_chunks.append(chunk.choices[0].delta.content)
                response = "".join(response_chunks)
                # Display the assembled response as recommendations.
                st.markdown(f"**Recommendations:**\n\n{response}")
        # Persist the assistant's reply so later turns include it as context.
        st.session_state.messages.append({"role": "assistant", "content": response})
    else:
        # Robustness: the original silently did nothing on an empty submit;
        # tell the user why no recommendations appeared.
        st.warning("Please describe your AWS architecture first.")