Update app.py
app.py CHANGED
@@ -1,6 +1,5 @@
-#
-
-# !pip install yfinance
+# !pip install langchain langchain-groq sentence-transformers langchainhub faiss-cpu gradio gradio_client yfinance duckduckgo-search
+
 import pandas as pd
 import io
 import requests
@@ -14,6 +13,8 @@ import requests
 import yfinance as yf
 import ast
 import re
+from datetime import datetime, timedelta
+import pytz
 
 
 # import langchain libraries
@@ -150,7 +151,7 @@ data_old_2 = loader2.load()
 loader3 = CSVLoader("nse_ca.csv")
 data_ca = loader3.load()
 
-global vectorstore,vectorstore2,vectorstore3
+global vectorstore,vectorstore2,vectorstore3, colist, colist_tracked
 # Create vectorstores - I tried Chroma but FAISS turned out to be successful
 vectorstore = FAISS.from_documents(data_old, embedding_function)
 vectorstore2 = FAISS.from_documents(data_old_2, embedding_function)
@@ -184,7 +185,6 @@ co_list = co_list1 + co_list_tracked
 
 ####################################
 
-
 ##################################
 # Let us create some functions required
 ##################################
@@ -197,6 +197,8 @@ def give_announcement(llm,stock):
 
 else:
 
+
+retriever1 = vectorstore.as_retriever()
 qa_chain = RetrievalQA.from_chain_type(llm,
 retriever=retriever1,
 return_source_documents=False)
@@ -217,7 +219,7 @@ def get_ca(llm,stock):
 # stock = resp1.content
 
 
-
+retriever3 = vectorstore3.as_retriever()
 qa_chain2 = RetrievalQA.from_chain_type(llm,
 retriever=retriever3,
 return_source_documents=False)
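Note on the two hunks above: retriever construction moves inside give_announcement and get_ca, so each call builds its retriever from whatever FAISS store currently exists instead of reusing a stale module-level retriever. A minimal sketch of that pattern, assuming the RetrievalQA import already present in app.py (the helper name build_qa_chain is illustrative, not part of the commit):

```python
from langchain.chains import RetrievalQA

def build_qa_chain(llm, vectorstore):
    # Build the retriever at call time so a freshly updated FAISS store is used
    # on the next query instead of a retriever captured at startup.
    retriever = vectorstore.as_retriever()
    return RetrievalQA.from_chain_type(llm,
                                       retriever=retriever,
                                       return_source_documents=False)
```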
@@ -250,12 +252,12 @@ def get_movements(llm,stock):
 return "This company has not made any announcements today or yesterday"
 else:
 
-stock = stock[0]
+stock = stock[0]
 
 dfc = pd.read_csv('nse_data_old.csv')
 
-stockdesc = dfc[dfc['COMPANY NAME'] == stock]['COMPANY NAME'].iloc[0]
-stock1 = dfc[dfc['COMPANY NAME'] == stock]['SYMBOL'].iloc[0]
+stockdesc = dfc[dfc['COMPANY NAME'] == stock]['COMPANY NAME'].iloc[0]
+stock1 = dfc[dfc['COMPANY NAME'] == stock]['SYMBOL'].iloc[0]
 stock = get_ticker(stock1)
 
 print("stock is ",stock)
@@ -319,11 +321,13 @@ def get_sentiments(llm,stock):
 if not stock:
 return "This company has not made any announcements today or yesterday"
 else:
-stock
+print("st1",stock)
+stock = stock[0]
+print("af ",stock)
 #####
 dfc = pd.read_csv('nse_data_old.csv')
 
-stockdesc = dfc[dfc['COMPANY NAME'] == stock]['COMPANY NAME'].iloc[0]
+stockdesc = dfc[dfc['COMPANY NAME'] == stock]['COMPANY NAME'].iloc[0]
 stock1 = dfc[dfc['COMPANY NAME'] == stock]['SYMBOL'].iloc[0]
 stock = get_ticker(stock1)
 tools=[
@@ -356,12 +360,17 @@ def get_sentiments(llm,stock):
 )
 
 # Construct the Tools agent
-agent = create_tool_calling_agent(llm, tools, prompt)
+# agent = create_tool_calling_agent(llm, tools, prompt)
 
 # Create an agent executor by passing in the agent and tools
 agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
-
-
+try:
+agent = create_tool_calling_agent(llm, tools, prompt)
+agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+response = agent_executor.invoke({"input": f"Get broker sentiment for the stock {stock} and stock name {stockdesc}"})
+return f"Broker sentiment analysis for {stock}. - {response['output']}"
+except Exception as e:
+return f"An error occurred: {str(e)}"
 #################
 
 # Fetch financial statements from Yahoo Finance
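Note on the get_sentiments hunk above: the original agent construction is commented out and rebuilt inside a try/except, but the unchanged context line `agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)` still runs before the try block and now references a name that is only defined inside it, which looks like it would raise a NameError on the first call. A hedged sketch of the guarded pattern without that leftover line (run_sentiment_agent is an illustrative name, not in the commit):

```python
from langchain.agents import AgentExecutor, create_tool_calling_agent

def run_sentiment_agent(llm, tools, prompt, query):
    # Build and invoke the agent inside the guard so setup failures are caught too.
    try:
        agent = create_tool_calling_agent(llm, tools, prompt)
        executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
        return executor.invoke({"input": query})["output"]
    except Exception as e:
        return f"An error occurred: {e}"
```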
@@ -399,7 +408,9 @@ def get_ticker(company_name):
 ticker = yf.Ticker(com)
 return ticker.info['symbol']
 
-def get_financialratio(model, input):
+def get_financialratio(model, input,stock):
+
+stock_name = get_companynames(stock)
 
 llm = get_model(model)
 
@@ -499,7 +510,7 @@ def get_financialratio(model, input):
 # Print only the results. Print the output in Json format.")
 return f"For the company: {stockname}, Here are the details: {response}"
 except Exception as e:
-return f"An error occurred: {str(e)}"
+return f"An error occurred: {str(e)}"
 
 ##########################
 # Functions to plot a chart over ratios - this has scope for major enhancements!
@@ -529,10 +540,11 @@ def plot_chart(data):
 # plt = plot_chart(response)
 # return plt
 
-def get_chart(model,input):
+def get_chart(model,input,stock):
+stock_name = get_companynames(stock)
 if stock_name:
 
-response = get_financialratio(model,input)
+response = get_financialratio(model,input,stock)
 # Extract the dictionary part using regex
 dict_match = re.search(r"\{.*\}", response) # Search for content within curly braces
 
@@ -548,8 +560,8 @@ def get_chart(model,input):
 else: return None
 
 
-def combined_ratio(model, input):
-return get_financialratio(model,input), get_chart(model, input)
+def combined_ratio(model, input,stock):
+return get_financialratio(model,input,stock), get_chart(model, input,stock)
 
 ###############################
 
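Note on the get_chart hunks above: the chart path depends on pulling a Python dict literal out of the model's free-text reply. A standalone illustration of that step, using re (already imported in app.py) and ast.literal_eval as one safe way to parse the match; the sample response string below is made up:

```python
import ast
import re

response = "For the company: DEMO LTD, Here are the details: {'2022': 1.1, '2023': 1.3, '2024': 1.6}"
dict_match = re.search(r"\{.*\}", response)            # grab the {...} portion of the reply
ratios = ast.literal_eval(dict_match.group(0)) if dict_match else {}
print(ratios)                                          # {'2022': 1.1, '2023': 1.3, '2024': 1.6}
```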
@@ -559,9 +571,9 @@ def combined_ratio(model, input):
 # Create the Gradio Blocks interface with a title and description
 
 ##################################
-
+global flag
 def incremental_process():
-global vectorstore,vectorstore2,vectorstore3,
+global vectorstore,vectorstore2,vectorstore3, flag
 
 try:
 df_new, _ = get_pd(1)
@@ -577,22 +589,20 @@ def incremental_process():
 #drop unnecessary common columns
 df_new.drop(['RECEIPT','DISSEMINATION','DIFFERENCE'],axis=1,inplace=True)
 
-
-
-# #find the difference and add incrementally for first store
+# #find the difference and add incrementally for first store
 df_merged = df_new.merge(df_old, how='left', indicator=True)
 # Filter rows that are unique to 'n' (i.e., where '_merge' is 'left_only')
 df_add1= df_merged[df_merged['_merge'] == 'left_only'].drop(columns=['_merge'])
 
 # Save it as a CSV file
 df_add1.to_csv("nse_data_add1.csv", index=False)
-
+
 #drop unnecessary columns for second vector store
 df_new2 = df_new.drop(['ATTACHMENT'],axis=1)
 
 # add increment for second store
 df_merged = df_new2.merge(df_old2, how='left', indicator=True)
-df_add2 = df_merged[df_merged['_merge'] == 'left_only'].drop(columns=['_merge'])
+df_add2 = df_merged[df_merged['_merge'] == 'left_only'].drop(columns=['_merge'])
 # Save it as a CSV file
 df_add2.to_csv("nse_data_add2.csv", index=False)
 
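Note on the incremental_process hunk above: the delta between the fresh NSE download and the stored CSV is isolated with pandas' merge indicator. A self-contained toy example of the idiom (the frames below are not NSE data):

```python
import pandas as pd

df_old = pd.DataFrame({"SYMBOL": ["AAA", "BBB"], "SUBJECT": ["Results", "Dividend"]})
df_new = pd.DataFrame({"SYMBOL": ["AAA", "BBB", "CCC"], "SUBJECT": ["Results", "Dividend", "Acquisition"]})

# indicator=True adds a '_merge' column; 'left_only' marks rows present only in df_new
df_merged = df_new.merge(df_old, how="left", indicator=True)
df_add = df_merged[df_merged["_merge"] == "left_only"].drop(columns=["_merge"])
print(df_add)  # only the CCC / Acquisition row remains
```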
@@ -614,21 +624,7 @@ def incremental_process():
 # Append df2 at the end of df1
 dfco = pd.concat([dfold, dfadd], ignore_index=True)
 
-
-
-dfco1 = dfco[['COMPANY NAME']]
-dfco2 = dfco1.drop_duplicates()
-
-# Save the result to a new CSV file
-dfco2.to_csv('companies.csv', index=False)
-
-dfco3 = dfco2.head(10)
-
-co_list3 = dfco3['COMPANY NAME'].unique().tolist()
-
-filtered_df = dfco2[dfco2['COMPANY NAME'].isin(co_list)]
-
-co_list2 = filtered_df['COMPANY NAME'].tolist()
+dfco.to_csv("dfco.csv",index=False)
 
 # Here incremental RAG is achieved by adding additional data dynamically to vectorstore
 loader = CSVLoader("nse_data_add1.csv")
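Note: the hunk above now only persists the combined dataframe to dfco.csv; feeding the delta CSVs back into the FAISS stores happens in the unchanged context that follows it. A hedged sketch of that incremental step, assuming the langchain_community CSVLoader/FAISS classes that app.py already uses (the import paths, the add_increment helper name, and the store directory argument are illustrative assumptions, not part of the commit):

```python
from langchain_community.document_loaders import CSVLoader
from langchain_community.vectorstores import FAISS

def add_increment(vectorstore, csv_path, embedding_function, store_dir):
    docs = CSVLoader(csv_path).load()
    if docs:
        # Append the new filings to the existing index instead of rebuilding it
        vectorstore.add_documents(docs)
    vectorstore.save_local(store_dir)
    # Reload to confirm what was persisted (mirrors the load_local calls in app.py)
    return FAISS.load_local(store_dir, embedding_function,
                            allow_dangerous_deserialization=True)
```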
@@ -681,39 +677,78 @@ def incremental_process():
 vectorstore3 = FAISS.load_local("vectorstore3",embedding_function,allow_dangerous_deserialization=True)
 print("final size store 3",vectorstore3.index.ntotal)
 
-
-
-
+return flag
+
+
+def get_colist2():
+dfco = pd.read_csv('dfco.csv')
+dfco1 = dfco[['COMPANY NAME']]
+dfco2 = dfco1.drop_duplicates()
+# Save the result to a new CSV file
+dfco2.to_csv('companies.csv', index=False)
+
+dfco3 = dfco2.head(10)
+
+co_list3 = dfco3['COMPANY NAME'].unique().tolist()
 
+filtered_df = dfco2[dfco2['COMPANY NAME'].isin(co_list)]
+
+co_list2 = filtered_df['COMPANY NAME'].tolist()
+return co_list2, co_list3
+
+def get_timestampmessage(flag):
+dfco = pd.read_csv('dfco.csv')
 timestamp = dfco[['BROADCAST DATE/TIME']].max().values.tolist()[0]
-# return flag, co_list2, retriever1, retriever2, retriever3
 if flag == 1:
 message = f"There is NSE timeout error. The latest filing information is available upto {timestamp}"
 else: message = f"Lastest filing information is available upto {timestamp}"
 return message
 
-
+
+def update():
+global flag
+flag = incremental_process()
+message = get_timestampmessage(flag)
+return message
 
 def give_time():
-
+dfco = pd.read_csv("dfco.csv")
 timestamp = dfco[['BROADCAST DATE/TIME']].max().values.tolist()[0]
 return timestamp
 
 
+
+# Define the IST timezone
+ist_timezone = pytz.timezone("Asia/Kolkata")
+# Define UTC for server-side time
+utc_timezone = pytz.utc
+
 def refresh():
-
-
-
-
-
-
-
-
-
-
-
-
-
+# Get the client-side timestamp (assuming it is in IST)
+timestamp_str = give_time() # The format returned should match the expected format
+given_time = datetime.strptime(timestamp_str, "%d-%b-%Y %H:%M:%S")
+given_time_ist = ist_timezone.localize(given_time) # Localize to IST
+
+# Get the current server time in UTC
+current_time_utc = datetime.now(tz=utc_timezone)
+
+# Convert the client-side time to UTC for consistent comparison
+given_time_utc = given_time_ist.astimezone(utc_timezone)
+
+# Calculate the time difference
+time_difference = current_time_utc - given_time_utc
+
+print("the time diff is ", time_difference)
+
+# Check if the time difference is greater than one hour
+if time_difference > timedelta(hours=1):
+message1 = update()
+print("Incremental update run")
+else:
+message1 = f"Refresh allowed only if data is stale for more than one hour. Current client timestamp: {timestamp_str}"
+
+return message1
+##########################################################################
 
 
 def plot1_top_20():
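Note on the refresh() logic added above: the expensive incremental_process() call is gated behind a one-hour staleness check that compares the latest IST filing timestamp against server time in UTC. A standalone demo of just that comparison, with a hard-coded sample timestamp in the app's format:

```python
from datetime import datetime, timedelta
import pytz

ist = pytz.timezone("Asia/Kolkata")
utc = pytz.utc

timestamp_str = "01-Jan-2025 09:30:00"   # sample value in the "%d-%b-%Y %H:%M:%S" format the app expects
given_ist = ist.localize(datetime.strptime(timestamp_str, "%d-%b-%Y %H:%M:%S"))
age = datetime.now(tz=utc) - given_ist.astimezone(utc)   # timezone-aware difference
print("stale, refresh allowed" if age > timedelta(hours=1) else "fresh, skip refresh")
```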
@@ -761,8 +796,9 @@ def plot1_top_20():
 
 ## Function to create company list specific chart
 def plot2_top_20():
+co_list2,_ = get_colist2()
 
-global co_list2
+# global co_list2
 # Get the counts of each label
 df = pd.read_csv('nse_data_old.csv')
 subjects = ['Acquisition',
@@ -822,10 +858,11 @@ def get_companynames(stock):
 unique_companies = matched_rows['COMPANY NAME'].unique()
 
 return list(set(unique_companies))
-else: return None
+else: return None
 
 # A combined function to be used in Gradio output box
 def print_model(llm):
+co_list2,_ = get_colist2()
 if co_list2:
 return f"You are using {llm.model_name} model for this session. \n \n" \
 f"These are the companies you track: {co_list_tracked}. \n \n" \
@@ -841,11 +878,9 @@ def print_model1(llm):
 
 
 def combined_function1(model,stock):
-global
-# flag, co_list2, retriever1, retriever2, retriever3 = incremental_process()
+global flag
 llm = get_model(model)
 stock = get_companynames(stock)
-stock_name = stock
 if flag == 0:
 return print_model(llm), give_announcement(llm,stock),get_ca(llm,stock),get_movements(llm,stock), get_sentiments(llm,stock)
 else:
@@ -865,6 +900,8 @@ def get_model(model_name):
 
 # This function is given here as company list is dynamic
 def give_names():
+global co_list_tracked
+co_list2, co_list3 = get_colist2()
 return f"Apart from NIFTY, these are the companies you track: \n \n" \
 f" {co_list_tracked}. \n \n" \
 f"These are the tracked companies that have made announcements: \n \n" \
@@ -874,9 +911,9 @@ def give_names():
 
 
 ##############################
-retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
+retrieval_qa_chat_prompt = hub.pull("langchain-ai/retrieval-qa-chat")
 ###############################
-
+
 # This function is for chat queries. Given here due to retriever defined here
 def chat_chain(model,query):
 llm = get_model(model)
@@ -885,15 +922,19 @@ def chat_chain(model,query):
 else:
 combine_docs_chain = create_stuff_documents_chain(
 llm, retrieval_qa_chat_prompt)
+retriever2 = vectorstore2.as_retriever()
 retrieval_chain = create_retrieval_chain(retriever2, combine_docs_chain)
 response = retrieval_chain.invoke({"input": query})
 return response['answer']
 
 
-
+#################################
+## Update the vectorstate with latest data
+flag = incremental_process()
+###########################################################################
 
-###########################################################################
 with gr.Blocks() as demo:
+
 # Add a Markdown block for the description
 gr.Markdown("""<h1 style='color: blue;'>Chat and Analyze with NSE Filings Information</h1>""")
 gr.Markdown("""Powered by Gradio, Groq, Llama3, FAISS, Langchain, YahooFinance""")
@@ -907,8 +948,8 @@ with gr.Blocks() as demo:
 """
 )
 
-txt_output = gr.Text(give_time,label = "Opening Data - Timestamp of latest Filing")
-txt_output = gr.Text(give_names,label = "Announcements for tracked companies")
+txt_output = gr.Text(give_time(),label = "Opening Data - Timestamp of latest Filing")
+txt_output = gr.Text(give_names(),label = "Announcements for tracked companies")
 
 # This is for defaulting charts when app is launched
 plot_output1 = gr.Plot(plot1_top_20(), label="Chart") # Call the function to create the plot
@@ -916,11 +957,12 @@ with gr.Blocks() as demo:
 plot_output2 = gr.Plot(plot2_top_20(), label="Chart") # Call the function to create the plot
 plt.close()
 gr.Markdown("""<h2 style='color: blue;'>Fetch Announcements/Corporate Actions/Price Movements/Broker Sentiments</h2>""")
-# Use a Column to structure the inputs and outputs
+# Use a Column to structure the inputs and outputs
 with gr.Column():
 outputs5 = [gr.Textbox(label="Latest Filing Timestamp",placeholder="Refresh data if stale for more than an hour")]
 button5 = gr.Button("Refresh Data")
-button5.click(refresh, inputs=None, outputs=outputs5)
+# button5.click(lambda: refresh(dfco), inputs=None, outputs=outputs5)
+button5.click(lambda: refresh(), inputs=None, outputs=outputs5)
 
 # Create a dropdown box for selecting the operation
 operation_dropdown = gr.Dropdown(
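Note on the UI hunks above: gr.Text now receives the computed values (give_time(), give_names()) rather than the function objects, and the refresh button wraps its callback in a lambda. As far as I know, Gradio re-evaluates a callable value on every page load while a called function is evaluated once at build time, and the lambda is equivalent to passing refresh directly since it takes no inputs; worth confirming against the installed Gradio version. A tiny sketch of the two options:

```python
import time
import gradio as gr

def current_time():
    return time.strftime("%d-%b-%Y %H:%M:%S")

with gr.Blocks() as sketch:
    gr.Text(value=current_time, label="Callable: re-evaluated on each page load")
    gr.Text(value=current_time(), label="Value: fixed when the UI is built")
    out = gr.Textbox(label="Output")
    gr.Button("Refresh").click(current_time, inputs=None, outputs=out)  # no lambda needed for a no-arg fn
```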
@@ -943,23 +985,21 @@ with gr.Blocks() as demo:
 gr.Textbox(label="Broker Sentiment", max_lines=100,show_copy_button=True),
 ]
 
-button1.click(combined_function1, inputs=[operation_dropdown,text_input1], outputs=outputs1)
-gr.Markdown("""<h1 style='color: green;'>Analyse Financial Statements
+button1.click(lambda x,y: combined_function1(x,y), inputs=[operation_dropdown,text_input1], outputs=outputs1)
+gr.Markdown("""<h1 style='color: green;'>Analyse the Financial Statements of the above Company</h1>""")
 
-# third button
 text_input3 = gr.Textbox(
 label="Enter Query",
 placeholder="Enter your query: e.g., What is the current ratio of the stock over three years?",
-lines=
-
-)
+lines=1)
+
 button3 = gr.Button("Analyse")
 outputs3 = [
 gr.Textbox(label="Chat Response", max_lines=100,show_copy_button=True),
-gr.Plot(label = "Chart")
-
-
-button3.click(combined_ratio, inputs=[operation_dropdown,text_input3], outputs=outputs3)
+gr.Plot(label = "Chart")]
+
+
+button3.click(combined_ratio, inputs=[operation_dropdown,text_input3,text_input1], outputs=outputs3)
 
 gr.Markdown("""<h1 style='color: orange;'>Chat With the NSE Filings Information</h1>""")
 
@@ -975,4 +1015,5 @@ with gr.Blocks() as demo:
 button2.click(chat_chain, inputs=[operation_dropdown,text_input2], outputs=outputs2)
 
 # Launch the Gradio app
-demo.launch()
+demo.launch()
+