ravithejads committed on
Commit
711256b
1 Parent(s): 5ea1b31

Upload 4 files

Browse files
Files changed (4) hide show
  1. README.md +5 -5
  2. app.py +254 -0
  3. data.json +0 -0
  4. requirements.txt +3 -0
README.md CHANGED
@@ -1,10 +1,10 @@
1
  ---
2
- title: Holybot
3
- emoji: 🌍
4
- colorFrom: pink
5
- colorTo: green
6
  sdk: gradio
7
- sdk_version: 3.29.0
8
  app_file: app.py
9
  pinned: false
10
  ---
 
1
  ---
2
+ title: HolyBot
3
+ emoji: 😻
4
+ colorFrom: green
5
+ colorTo: red
6
  sdk: gradio
7
+ sdk_version: 3.20.1
8
  app_file: app.py
9
  pinned: false
10
  ---
app.py ADDED
@@ -0,0 +1,254 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import openai
2
+ import pinecone
3
+ import gradio as gr
4
+ import os
5
+ import json
6
+
7
+ # initiates pinecone
8
+
9
+
10
def init_pinecone(apikey, environment):
    """Initialize the global Pinecone client.

    Args:
        apikey: Pinecone API key (read from an environment variable by callers).
        environment: Pinecone environment/region string for that key.
    """
    # pinecone.init configures module-level global state; each qa_* function
    # re-calls this with its own per-index credentials before querying.
    pinecone.init(api_key=apikey, environment=environment)
12
+
13
+
14
def qa_gita(query, openaikey):
    """Answer a user query from Bhagwad Gita verses.

    Embeds the query with OpenAI, retrieves the 8 most similar verses from
    the "gita" Pinecone index, and asks gpt-3.5-turbo to answer using those
    verses as context.

    Args:
        query: The user's question or situation.
        openaikey: The user's OpenAI API key.

    Returns:
        tuple[str, str]: (answer, retrieved-verses context). On missing
        input, an error message paired with an empty context.
    """
    # Basic Checks. Return a 2-tuple on the error paths too: this function
    # feeds two Gradio outputs, and a bare string return would be spread
    # across them instead of showing the message in the answer box.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query for vector similarity search.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    pinecone_api_key_gita = os.getenv("PINECONE_API_KEY_GITA")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_GITA")

    init_pinecone(pinecone_api_key_gita, pinecone_environment)

    index = pinecone.Index("gita")

    # Verse texts live locally in data.json; Pinecone stores only vectors.
    with open("data.json", "r") as f:
        data = json.load(f)

    res = index.query(vector=embedding, top_k=8)

    ids = [match["id"] for match in res["matches"]]

    # Concatenate the matched verses into the prompt context.
    # (`verse_id` avoids shadowing the builtin `id`.)
    context = ""
    for verse_id in ids:
        context += f"{verse_id}: {data['gita'][str(verse_id)]}\n\n"

    systemprompt = "You are not an AI Language model. You will be a Bhagwad Gita Assistant to the user. Restrict Yourself to the context of the question."

    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # temperature=0.0 keeps the answer deterministic and verse-grounded.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )

    answer = response["choices"][0]["message"]["content"]

    return answer, context
63
+
64
+
65
def qa_bible(query, openaikey):
    """Answer a user query from Bible verses.

    Embeds the query with OpenAI, retrieves the 8 most similar verses from
    the "bible" Pinecone index, and asks gpt-3.5-turbo to answer using those
    verses as context.

    Args:
        query: The user's question or situation.
        openaikey: The user's OpenAI API key.

    Returns:
        tuple[str, str]: (answer, retrieved-verses context). On missing
        input, an error message paired with an empty context.
    """
    # Basic Checks. Return a 2-tuple on the error paths too: this function
    # feeds two Gradio outputs, and a bare string return would be spread
    # across them instead of showing the message in the answer box.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query for vector similarity search.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    pinecone_api_key_bible = os.getenv("PINECONE_API_KEY_BIBLE")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_BIBLE")

    init_pinecone(pinecone_api_key_bible, pinecone_environment)

    index = pinecone.Index("bible")

    # Verse texts live locally in data.json; Pinecone stores only vectors.
    with open("data.json", "r") as f:
        data = json.load(f)

    res = index.query(vector=embedding, top_k=8)

    ids = [match["id"] for match in res["matches"]]

    # Concatenate the matched verses into the prompt context.
    # (`verse_id` avoids shadowing the builtin `id`.)
    context = ""
    for verse_id in ids:
        context += f"{verse_id}: {data['bible'][str(verse_id)]}\n\n"

    systemprompt = "You are not an AI Language model. You will be a Bible Assistant to the user. Restrict Yourself to the context of the question."

    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # temperature=0.0 keeps the answer deterministic and verse-grounded.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )

    answer = response["choices"][0]["message"]["content"]

    return answer, context
114
+
115
+
116
def qa_quran(query, openaikey):
    """Answer a user query from Quran verses.

    Embeds the query with OpenAI, retrieves the 8 most similar verses from
    the "quran" Pinecone index, and asks gpt-3.5-turbo to answer using those
    verses as context.

    Args:
        query: The user's question or situation.
        openaikey: The user's OpenAI API key.

    Returns:
        tuple[str, str]: (answer, retrieved-verses context). On missing
        input, an error message paired with an empty context.
    """
    # Basic Checks. Return a 2-tuple on the error paths too: this function
    # feeds two Gradio outputs, and a bare string return would be spread
    # across them instead of showing the message in the answer box.
    if not query:
        return "Please enter your query.", ""
    if not openaikey:
        return "Please enter openaikey.", ""

    openai.api_key = openaikey

    # Embed the query for vector similarity search.
    response = openai.Embedding.create(
        input=[query], model="text-embedding-ada-002")
    embedding = response["data"][0]["embedding"]

    pinecone_api_key_quran = os.getenv("PINECONE_API_KEY_QURAN")
    pinecone_environment = os.getenv("PINECONE_ENVIRONMENT_QURAN")

    init_pinecone(pinecone_api_key_quran, pinecone_environment)

    index = pinecone.Index("quran")

    # Verse texts live locally in data.json; Pinecone stores only vectors.
    with open("data.json", "r") as f:
        data = json.load(f)

    res = index.query(vector=embedding, top_k=8)

    ids = [match["id"] for match in res["matches"]]

    # Concatenate the matched verses into the prompt context.
    # (`verse_id` avoids shadowing the builtin `id`.)
    context = ""
    for verse_id in ids:
        context += f"{verse_id}: {data['quran'][str(verse_id)]}\n\n"

    systemprompt = "You are not an AI Language model. You will be a Quran Assistant to the user. Restrict Yourself to the context of the question."

    userprompt = f"Verses:\n\n{context}\n\nQuestion: {query}\n\nAnswer:\n\n"

    # temperature=0.0 keeps the answer deterministic and verse-grounded.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": systemprompt},
            {"role": "user", "content": userprompt},
        ],
        max_tokens=256,
        temperature=0.0,
    )

    answer = response["choices"][0]["message"]["content"]

    return answer, context
165
+
166
+
167
def cleartext(query, output, references):
    """Clear the query, answer, and references textboxes.

    The incoming values are ignored; one empty string is emitted per
    wired Gradio output component.
    """
    return ["" for _ in range(3)]
172
+
173
+
174
# Gradio UI: a shared OpenAI-key textbox plus one tab per holy book,
# each tab wired to the matching qa_* retrieval function above.
with gr.Blocks() as demo:
    gr.Markdown(
        """
        <h1><center><b>HolyBot</center></h1>
        """
    )
    gr.Markdown(
        """
        HolyBot answers your queries and gives relevant verses based on Bhagwad Gita/ Quran/ Bible holy books, built using OpenAI ChatGPT, and Pinecone Index.
        - Get your [OpenAI API Key](https://platform.openai.com/account/api-keys) before proceeding further.
        - Refer to the codebase for this project on [GitHub](https://github.com/ravi03071991/HolyBot)."""
    )
    with gr.Tabs():
        # Single API-key field shared by all three tabs.
        openaikey = gr.Textbox(lines=1, label="Enter Your OpenAI Key")
        with gr.TabItem("Bhagwad Gita"):
            with gr.Row():
                with gr.Column():
                    query1 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button1 = gr.Button("Submit")
                with gr.Column():
                    ans_output1 = gr.Textbox(lines=5, label="Answer.")
                    references1 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button1 = gr.Button("Clear")
        with gr.TabItem("Quran"):
            with gr.Row():
                with gr.Column():
                    query2 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button2 = gr.Button("Submit")
                with gr.Column():
                    ans_output2 = gr.Textbox(lines=5, label="Answer.")
                    references2 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button2 = gr.Button("Clear")
        with gr.TabItem("Bible"):
            with gr.Row():
                with gr.Column():
                    query3 = gr.Textbox(
                        lines=2, label="Enter Your Situation/ Query.")
                    submit_button3 = gr.Button("Submit")
                with gr.Column():
                    ans_output3 = gr.Textbox(lines=5, label="Answer.")
                    references3 = gr.Textbox(
                        lines=10, label="Relevant Verses.")
                    clear_button3 = gr.Button("Clear")

    # For Bhagwad Gita
    # Submit button for submitting query.
    # NOTE(review): qa_* feeds two outputs; its error paths should also
    # return two values — confirm against the qa_* implementations.
    submit_button1.click(qa_gita, inputs=[query1, openaikey], outputs=[
                         ans_output1, references1])
    # Clear button for clearing query and answer.
    clear_button1.click(
        cleartext,
        inputs=[query1, ans_output1, references1],
        outputs=[query1, ans_output1, references1],
    )

    # For Quran
    # Submit button for submitting query.
    submit_button2.click(qa_quran, inputs=[query2, openaikey], outputs=[
                         ans_output2, references2])
    # Clear button for clearing query and answer.
    clear_button2.click(
        cleartext,
        inputs=[query2, ans_output2, references2],
        outputs=[query2, ans_output2, references2],
    )

    # For Bible
    # Submit button for submitting query.
    submit_button3.click(qa_bible, inputs=[query3, openaikey], outputs=[
                         ans_output3, references3])
    # Clear button for clearing query and answer.
    clear_button3.click(
        cleartext,
        inputs=[query3, ans_output3, references3],
        outputs=[query3, ans_output3, references3],
    )
# debug=True surfaces tracebacks in the UI/console while developing.
demo.launch(debug=True)
data.json ADDED
The diff for this file is too large to render. See raw diff
 
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ openai
2
+ pinecone-client
3
+ gradio