Update app.py
app.py CHANGED
@@ -30,7 +30,7 @@ with st.sidebar:
     elif selected_model == 'Llama3-70B-Instruct':
         llm = 'meta/meta-llama-3-70b-instruct'
 
-    st.sidebar.
+    st.sidebar.subheader("System Prompt")
     user_input = st.sidebar.text_area("Context for Fine-tuning:", height=10)
 
     temperature = st.sidebar.slider('temperature', min_value=0.01, max_value=1.0, value=0.1, step=0.01)
@@ -53,7 +53,7 @@ st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
 
 # Function for generating LLaMA2 response. Refactored from https://github.com/a16z-infra/llama2-chatbot
 def generate_llama2_response(prompt_input):
-    string_dialogue = 
+    string_dialogue = user_input
     for dict_message in st.session_state.messages:
         if dict_message["role"] == "user":
            string_dialogue += "User: " + dict_message["content"] + "\n\n"
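For context, here is a minimal sketch of how the two changed lines work together once the commit is applied: the new "System Prompt" text area in the sidebar supplies the text that seeds string_dialogue inside generate_llama2_response. Everything outside the diff is assumed, not shown in this commit: the model selectbox, the session-state initialization, the else branch for assistant turns (the second hunk is cut off before it), and the replicate.run call shape, which follows the a16z-infra/llama2-chatbot pattern that the code comment cites.

import streamlit as st
import replicate  # assumes REPLICATE_API_TOKEN is set in the environment

# Chat history, seeded as in the cited a16z llama2-chatbot reference (assumed).
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "How may I assist you today?"}]

with st.sidebar:
    # Model picker is assumed; the diff only shows the Llama3-70B branch.
    selected_model = st.selectbox('Choose a model', ['Llama3-70B-Instruct'])
    if selected_model == 'Llama3-70B-Instruct':
        llm = 'meta/meta-llama-3-70b-instruct'

    # The two lines this commit touches: a labelled system-prompt box ...
    st.sidebar.subheader("System Prompt")
    # (recent Streamlit versions may enforce a larger minimum height than 10)
    user_input = st.sidebar.text_area("Context for Fine-tuning:", height=10)

    temperature = st.sidebar.slider('temperature', min_value=0.01, max_value=1.0, value=0.1, step=0.01)

def generate_llama2_response(prompt_input):
    # ... whose contents now seed the dialogue instead of an empty string.
    string_dialogue = user_input
    for dict_message in st.session_state.messages:
        if dict_message["role"] == "user":
            string_dialogue += "User: " + dict_message["content"] + "\n\n"
        else:  # assistant turns; assumed, since the hunk ends before this branch
            string_dialogue += "Assistant: " + dict_message["content"] + "\n\n"
    # Call shape assumed from the cited a16z repo, adapted to the Llama 3 model;
    # replicate.run streams chunks for this model, so join them into one string.
    output = replicate.run(llm,
                           input={"prompt": f"{string_dialogue} {prompt_input} Assistant: ",
                                  "temperature": temperature})
    return "".join(output)

Seeding string_dialogue with the sidebar text effectively turns that text area into the system prompt for every generated response, which is why the commit pairs the subheader label with the assignment change.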