import gradio as gr
import google.generativeai as genai
import os


def chat_with_gemini(user_api_key, user_input, history):
    """
    Generates a response from the Gemini API based on user input and conversation history,
    using the provided user API key or falling back on a default API key.

    Args:
        user_api_key (str): The user's Google Gemini API key (optional).
        user_input (str): The latest message from the user.
        history (list): The conversation history as a list of [user, bot] message pairs.

    Returns:
        tuple: The updated history twice, once for the Chatbot display and once for the state.
    """
    # Determine which API key to use
    api_key = user_api_key.strip() if user_api_key else os.getenv("YOUR_API_KEY")
    if not api_key:
        # If no API key is available, prompt the user
        history.append(["", "Please enter your Google Gemini API key to start the conversation."])
        return history, history
    try:
        # Configure the Gemini API with the selected API key
        genai.configure(api_key=api_key)
        # Initialize the Gemini Generative Model
        model = genai.GenerativeModel("gemini-1.5-flash")
        # Generate a response from the Gemini API
        response = model.generate_content(
            user_input,
            generation_config=genai.GenerationConfig(
                max_output_tokens=2000,
                temperature=0.7
            )
        )
        chatbot_reply = response.text.strip()
        # Append the user input and chatbot reply to the history as a single entry
        history.append([user_input, chatbot_reply])
        return history, history
    except Exception as e:
        error_message = f"An error occurred: {e}"
        history.append(["", error_message])
        return history, history
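
# Example usage: a minimal sketch of calling chat_with_gemini directly, assuming a
# valid key is passed in (or exported beforehand as the YOUR_API_KEY environment
# variable that the fallback above reads). The variable names are illustrative and
# not part of the original app.
#
#     display_history, state_history = chat_with_gemini("", "Hello, Gemini!", [])
#     print(display_history[-1][1])  # the model's reply (or an error message)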


with gr.Blocks() as iface:
    gr.Markdown("""
# Google Gemini 1.5 Flash Chatbot

Welcome to the Google Gemini-powered chatbot! You can interact with the bot by typing your messages below.

**API Key Setup:**
- **Option 1:** Enter your own Google Gemini API key in the input field below.
- **Option 2:** If you leave the API key field empty, the chatbot will use a default API key.

> **Note:** Keep your API key secure and do not share it publicly.
""")

    with gr.Column():
        # API Key Input Section
        with gr.Row():
            api_key_input = gr.Textbox(
                label="Google Gemini API Key (Optional)",
                placeholder="Enter your API key here...",
                type="password",
                lines=1
            )

        # Chatbot Display
        chatbot = gr.Chatbot()

        # User Input Row
        with gr.Row():
            user_input = gr.Textbox(
                placeholder="Type your message here...",
                show_label=False
            )
            send_button = gr.Button("Send")

    # State to hold the conversation history
    history = gr.State([])

    def respond(user_api_key, message, history_state):
        """
        Handles the user message, generates a response using the provided or default API key,
        and updates the conversation history.

        Args:
            user_api_key (str): The user's API key (optional).
            message (str): The user's message.
            history_state (list): The current conversation history.

        Returns:
            tuple: The updated history for the Chatbot display and for the session state.
        """
        updated_history, new_history = chat_with_gemini(user_api_key, message, history_state)
        return updated_history, new_history

    # Connect the send button and textbox submission to the respond function
    send_button.click(respond, inputs=[api_key_input, user_input, history], outputs=[chatbot, history])
    user_input.submit(respond, inputs=[api_key_input, user_input, history], outputs=[chatbot, history])
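
    # Optional, not part of the original app: a minimal sketch of also clearing the
    # input box after sending, by having respond return an extra empty string and
    # adding user_input to the outputs:
    #
    #     def respond(user_api_key, message, history_state):
    #         updated_history, new_history = chat_with_gemini(user_api_key, message, history_state)
    #         return updated_history, new_history, ""
    #
    #     send_button.click(respond, inputs=[api_key_input, user_input, history],
    #                       outputs=[chatbot, history, user_input])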


if __name__ == "__main__":
    iface.launch()
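
# A minimal sketch of running this app locally (the package names match the imports
# above; versions are not pinned in the original, and "app.py" is an assumed filename):
#
#     pip install gradio google-generativeai
#     python app.py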