print('Running Main')
import os
import gradio as gr
from langchain.agents import Tool, initialize_agent
from langchain_community.llms import LlamaCpp
from functions import get_weather_info, get_forecast, shutdown
from huggingface_hub import hf_hub_download
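# Note: functions.py is not included in this file. Judging from how its helpers
# are used below, the assumed interface is roughly:
#   get_weather_info(city: str) -> str  # current conditions for a city
#   get_forecast(city: str) -> str      # two-day forecast for a city
#   shutdown() -> str                   # stops the app and returns a status message
# These signatures are inferred from the calls in this script, not from the
# module itself.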
print('going to download model')
# Download the model directly in the app
model_path = hf_hub_download(
    repo_id="microsoft/Phi-3-mini-4k-instruct-gguf",
    filename="Phi-3-mini-4k-instruct-q4.gguf",
)
print('going to initialize model')
# Initialize the LlamaCpp model
llm = LlamaCpp(
    model_path=model_path,
    n_ctx=4096,       # context window; matches the 4k-instruct variant of Phi-3-mini
    n_gpu_layers=-1,  # offload all layers to the GPU when one is available
)
# Define tools
weather_tool = Tool(
    name="WeatherLookup",
    func=get_weather_info,
    description=(
        "Useful for getting the current weather (today) for a city, including "
        "temperature, pressure, humidity, wind, clouds, and rain."
    ),
)
forecast_tool = Tool(
    name="ForecastLookup",
    func=get_forecast,
    description=(
        "Useful for getting the weather forecast for the next two days for a city, "
        "including temperature, pressure, humidity, wind, clouds, and rain."
    ),
)
# Register both the weather and forecast tools
tools = [weather_tool, forecast_tool]
# Initialize Agent
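# "zero-shot-react-description" creates a ReAct-style agent: at each step the
# LLM reasons about the request and chooses a tool based solely on the tool
# descriptions defined above.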
agent = initialize_agent(tools, llm, agent="zero-shot-react-description", verbose=True)
def respond(message, history):
    try:
        # Build a transcript of the conversation so far (note: the agent call
        # below only uses the latest message, not this transcript)
        prompt = "\n".join(
            f"{'User' if i % 2 == 0 else 'Assistant'}: {m[0]}"
            for i, m in enumerate(history)
        ) + "\nAssistant:"
        # Generate the response using the LangChain agent
        response = agent.run(message)
        # Update history with the user message and the assistant's response
        history.append((message, response))
        return response, history
    except Exception as e:
        return f"An error occurred: {e}", history
# Define the Gradio interface
with gr.Blocks(css="style.css") as demo:
    gr.Markdown(
        """
# Weather Chatbot
Get real-time weather forecasts or chat with our assistant. Type your queries in natural language.
"""
    )
    with gr.Row():
        with gr.Column():
            message = gr.Textbox(
                label="Ask a weather question or chat with the assistant",
                lines=2,
                placeholder="Type your question here...",
            )
            response = gr.Textbox(label="Response", lines=2)
            state = gr.State([])
            btn = gr.Button("Submit")
            btn.click(respond, [message, state], [response, state])
            shutdown_btn = gr.Button("Shutdown")
            shutdown_btn.click(shutdown, [], response)
    gr.Examples(
        examples=[
            ["What's the weather in New York?"],
            ["Tell me the weather forecast for Tokyo."],
            ["What's the temperature in London?"],
        ],
        inputs=message,
    )
# Launch the Gradio interface
def main():
    print('about to start')
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        ssl_keyfile="/home/user/app/certificates/selfsigned.key",
        ssl_certfile="/home/user/app/certificates/selfsigned.crt",
        ssl_verify=False,  # skip SSL verification for the self-signed development certificate
        share=True,
    )

if __name__ == "__main__":
    main()