# Function Calling with OpenAI-Compatible APIs (via Groq)
import requests
import os
import json
from dotenv import load_dotenv
import streamlit as st
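# Expects GROQ_API_KEY and OPENWEATHER_API_KEY in the environment (or a local .env file)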
load_dotenv()

from groq import Groq

client = Groq(
    api_key=os.getenv("GROQ_API_KEY"),
)


# ### Define Weather Function

# Fetches the current weather for the given location from the OpenWeatherMap API.
# Returns the raw response dict on success, or a small error dict otherwise.
def get_current_weather(location):
    url = f'https://api.openweathermap.org/data/2.5/weather?q={location}&appid={os.getenv("OPENWEATHER_API_KEY")}'
    response = requests.get(url)
    data = response.json()
    if data['cod'] == 200:
        return data
    else:
        return {"city": location, "weather": "Data Fetch Error", "temperature": "N/A"}

# print(get_current_weather("London"))
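# For reference, a successful OpenWeatherMap response looks roughly like this
# (abbreviated and illustrative; the real payload carries many more fields):
# {"cod": 200, "name": "London",
#  "main": {"temp": 287.5, "humidity": 81},
#  "weather": [{"description": "light rain"}]}
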
# ### Define Functions
# 
# As demonstrated in the OpenAI documentation, here is a simple example of how to define the functions (tools) that are passed as part of the request.
# 
# The descriptions are important: they are passed directly to the LLM, which uses them to decide whether to call a function and how to call it.



# # define a function as tools
# tools = [
#     {
#         "type": "function",
#         "function": {
#             "name": "get_current_weather",
#             "description": "Get the current weather in a given location",
#             "parameters": {
#                 "type": "object",
#                 "properties": {
#                     "location": {
#                         "type": "string",
#                         "description": "The city and state, e.g. San Francisco, CA"
#                     }
#                 },
#                 "required": ["location"]
#             }
#         }
#     },
# ]


def get_response(input_text):
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA"
                        }
                    },
                    "required": ["location"]
                }
            }
        },
    ]
    # First call: the model decides whether it needs the weather tool
    response = client.chat.completions.create(
        model="mixtral-8x7b-32768",
        messages=[
            {
                "role": "user",
                "content": input_text,
            }
        ],
        temperature=0,
        max_tokens=300,
        tools=tools,
        tool_choice="auto",
    )

    # The model's reply; when it decides to use the tool, it carries tool_calls
    # instead of plain text content.
    groq_response = response.choices[0].message

    # If no tool call was requested, return the model's text answer directly.
    if not groq_response.tool_calls:
        return groq_response.content or ""

    # We can now capture the arguments the model supplied for
    # get_current_weather, e.g. {"location": "London"}.
    args = json.loads(groq_response.tool_calls[0].function.arguments)

    # Call the actual function with those arguments.
    output = get_current_weather(**args)

    # Second call: hand the weather data back to the model so it can describe
    # the weather and answer the user's question in natural language.
    completion = client.chat.completions.create(
        model="mixtral-8x7b-32768",
        messages=[
            {
                "role": "system",
                "content": "You are a helpful assistant. You are given the weather details in JSON format. Read the data, give a brief description of the weather, and then answer the question. All temperatures are in Kelvin. Only mention details about the weather."
            },
            {
                "role": "user",
                "content": f"Question: {input_text}\n\nWeather data: {json.dumps(output)}"
            }
        ],
        temperature=0.25,
        max_tokens=200,
        top_p=1,
        stream=True,
        stop=None,
    )
    output=""
    # st.write("Response:")
    for chunk in completion:
        output+=chunk.choices[0].delta.content or ""
    # output+="\n"
    return output
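
# Quick standalone check (outside Streamlit), assuming both API keys are set:
# print(get_response("What's the weather like in London?"))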

def main():
    st.title("Weather Chatbot")

    # User input
    st.write("Hi, I am a weather chatbot. Ask me anything!")
    question = st.text_input("Type in your question")

    # Ask me button
    if st.button("Ask me"):
        # Check that a question was provided
        if question:
            # Get the model's answer about the weather
            response = get_response(question)
            # Display the answer (plain text, not JSON)
            st.write(response)
        else:
            st.warning("Please type in a question.")

if __name__ == "__main__":
    main()
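
# To launch the app locally (the filename below is a placeholder for this script's actual name):
#   streamlit run weather_chatbot.py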