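"""Gradio app: fetch the current weather for a city from OpenWeatherMap and have
a Mistral-7B-Instruct model (via the Hugging Face Inference API) write a short
review of it, orchestrated as a two-node LangGraph MessageGraph."""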
import os
from typing import Sequence

import gradio as gr
import httpx
from dotenv import load_dotenv
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_huggingface import HuggingFaceEndpoint
from langgraph.graph import MessageGraph, END

# Load environment variables
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")
WEATHER_TOKEN = os.getenv("WEATHER_TOKEN")
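# Both tokens are read from the environment; when running locally they are
# expected in a .env file (or, presumably, as Space secrets on Hugging Face):
#   HF_TOKEN      - Hugging Face access token for the Inference API
#   WEATHER_TOKEN - OpenWeatherMap API key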
# Initialize the Hugging Face inference endpoint
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    huggingfacehub_api_token=HF_TOKEN.strip(),
    temperature=0.7,
    max_new_tokens=200,
)
# Define nodes. Each MessageGraph node receives the running list of messages,
# and whatever it returns is appended to that list.
def fetch_weather_node(messages: Sequence[BaseMessage]) -> AIMessage:
    """Fetch the current weather for the city named in the latest message."""
    city = messages[-1].content
    url = (
        "https://api.openweathermap.org/data/2.5/weather"
        f"?q={city}&appid={WEATHER_TOKEN}&units=metric"
    )
    try:
        response = httpx.get(url)
        response.raise_for_status()
        weather_data = response.json()
        weather = weather_data["weather"][0]["main"]
        temperature = weather_data["main"]["temp"]
        return AIMessage(
            content=f"The current weather in {city} is {weather} "
                    f"with a temperature of {temperature}°C."
        )
    except Exception as e:
        return AIMessage(content=f"Error: {e}")


def generate_review_node(messages: Sequence[BaseMessage]) -> AIMessage:
    """Ask the LLM to review the weather report produced by the previous node."""
    weather_info = messages[-1].content
    prompt = review_prompt_template.format(weather_info=weather_info)
    review = llm.invoke(prompt)
    return AIMessage(content=review)
# Define the prompt template for generating weather reviews
review_prompt_template = """
You are an expert weather analyst. Based on the provided weather information, generate a detailed and insightful review.

Weather Information: {weather_info}

Your review should analyze the weather conditions and be no longer than 150 words.

Review:
"""
# Create and configure the graph
builder = MessageGraph()

# Add nodes
builder.add_node("fetch_weather", fetch_weather_node)
builder.add_node("generate_review", generate_review_node)
builder.set_entry_point("fetch_weather")

# Define transitions
builder.add_edge("fetch_weather", "generate_review")
builder.add_edge("generate_review", END)

# Compile the graph
graph = builder.compile()
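# Rough sketch of how the compiled graph can be exercised directly (assuming
# valid API tokens); MessageGraph returns the accumulated message list:
#   messages = graph.invoke(HumanMessage(content="London"))
#   messages[0].content  -> "London"
#   messages[1].content  -> "The current weather in London is ..."
#   messages[2].content  -> the generated review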
# Define the Gradio interface
def get_weather_and_review(city: str) -> str:
    if not city:
        return "Please enter a city name."
    try:
        # The graph returns the accumulated messages:
        # [input city, weather report, AI-generated review]
        messages = graph.invoke(HumanMessage(content=city))
        weather_info_text = messages[1].content
        review_text = messages[2].content
        return (
            f"**Weather Information:**\n{weather_info_text}\n\n"
            f"**AI Generated Weather Review:**\n{review_text}"
        )
    except Exception as e:
        return f"Error generating weather review: {e}"
interface = gr.Interface(
    fn=get_weather_and_review,
    inputs=gr.Textbox(lines=2, placeholder="Enter the name of a city:", label="City"),
    outputs="text",
    title="City Weather Information with AI Review",
    description="Enter the name of a city to get current weather information and an AI-generated review based on that information.",
)

if __name__ == "__main__":
    interface.launch()
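# To try this locally (assuming this file is saved as app.py):
#   pip install gradio httpx python-dotenv langgraph langchain-core langchain-huggingface
#   python app.py
# Gradio prints a local URL once the interface is up.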