import os
import time

import gradio as gr
import google.generativeai as genai
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()

# Retrieve API key from environment variable
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")

# Configure Google Gemini API
genai.configure(api_key=GEMINI_API_KEY)

# Create the model configuration
generation_config = {
    "temperature": 0.7,
    "top_p": 0.95,
    "top_k": 64,
    "max_output_tokens": 512,  # Adjust as needed
    "response_mime_type": "text/plain",
}

# Simplified safety settings (or try removing them to test)
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
]


# Function to generate a response based on user input and chat history
def generate_response(user_input, chat_history):
    """Generates a response based on user input and chat history."""
    # System instruction carrying the character description
    system_content = (
        "You are Shadow the Hedgehog and you must act like "
        "Shadow the Hedgehog's personality."
    )

    # Create the generative model
    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config=generation_config,
        safety_settings=safety_settings,
        system_instruction=system_content,
    )

    # Add user input to history and limit it to the last 10 messages
    chat_history.append(user_input)
    chat_history = chat_history[-10:]

    retry_attempts = 3
    for attempt in range(retry_attempts):
        try:
            # Start a new chat session and send the entire history as one message
            chat_session = model.start_chat()
            response = chat_session.send_message("\n".join(chat_history))
            return response.text, chat_history
        except Exception as e:
            if attempt < retry_attempts - 1:
                time.sleep(2)  # Delay before retrying
                continue
            return f"Error after {retry_attempts} attempts: {str(e)}", chat_history


# Build the Gradio interface
with gr.Blocks(theme="Hev832/Applio") as iface:
    gr.Markdown(
        "Duplicate this space with your own Gemini API key "
        "in case you run into an error!"
    )
    chat_input = gr.Textbox(
        lines=2, label="Talk to AI", placeholder="Enter your message here..."
    )
    chat_history_state = gr.State([])  # State input for chat history
    response_output = gr.Textbox(label="Response")

    # Define the layout and components
    generate_button = gr.Button("Generate Response")
    generate_button.click(
        fn=generate_response,
        inputs=[chat_input, chat_history_state],
        outputs=[response_output, chat_history_state],
    )

iface.launch()
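
# ---------------------------------------------------------------------------
# Setup notes (a minimal sketch; the file contents below are assumptions, not
# part of the original script). The os.getenv call above expects a .env file
# next to this script that defines the key under the same name:
#
#   GEMINI_API_KEY=your-api-key-here
#
# Assumed dependencies, installable with pip:
#
#   pip install gradio google-generativeai python-dotenv
# ---------------------------------------------------------------------------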