Update excel_chat.py

excel_chat.py (+34 -9)
```diff
@@ -19,19 +19,41 @@ def ask_llm(query, input, client_index):
             "content": f"{input}",
         }
     ]
+    systemC = f"You are a helpful assistant. Only show your final response to the **User Query**! Do not provide any explanations or details: \n# User Query:\n{query}."
+    messageC=[
+        {
+            "role": "user",
+            "content": [
+                {
+                    "type": "text",
+                    "text": f"{input}"
+                }
+            ]
+        }
+    ]
 
-    if client_index ==
+    if client_index == "Groq":
         client = Groq(api_key=os.environ["GROQ_API_KEY"])
         chat_completion = client.chat.completions.create(
             messages=messages,
             model='mixtral-8x7b-32768',
         )
-    else:
+    elif client_index == "Mistral":
         client = MistralClient(api_key=os.environ['MISTRAL_API_KEY'])
         chat_completion = client.chat(
             messages=messages,
             model='mistral-small-latest',
         )
+    else:
+        client = anthropic.Anthropic(api_key=os.environ['CLAUDE_API_KEY'])
+        chat_completion = client.messages.create(
+            model="claude-3-sonnet-20240229",
+            max_tokens=350,
+            temperature=0,
+            system=systemC,
+            messages=messageC
+        ).content[0].text
+        return chat_completion
 
     return chat_completion.choices[0].message.content
```
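This hunk turns the old two-branch selection into a three-way provider switch: `client_index` now carries a provider name, `"Groq"` and `"Mistral"` keep their existing clients, and any other value falls through to a new Anthropic branch (the `anthropic` import is assumed to exist elsewhere in the file). Note the asymmetry: the Claude branch extracts `.content[0].text` and returns early, because Anthropic's response shape differs from the OpenAI-style `choices[0].message.content` used by the other two clients. A hypothetical call after this change; the query and content strings below are invented for illustration:

```python
# Hypothetical usage of the patched ask_llm(); the strings are
# illustrative and not part of the commit.
answer = ask_llm(
    query="Summarize this row in one sentence.",   # for Claude this becomes the system prompt via systemC
    input="Name: ACME\nCountry: France\nRevenue: 1.2M",
    client_index="Claude",  # any value other than "Groq"/"Mistral" selects the Anthropic branch
)
print(answer)
```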
```diff
@@ -61,17 +83,20 @@ def chat_with_mistral(source_cols, dest_col, prompt, excel_file, url, search_col
 
     for index, row in filtred_df.iterrows():
         concatenated_content = "\n\n".join(f"{column_name}: {str(row[column_name])}" for column_name in source_cols)
-        llm_answer = ask_llm(prompt, concatenated_content, client)
-        print(f"QUERY:\n{prompt}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
-        df.at[index, dest_col] = llm_answer
+        print('test')
+        if not concatenated_content == "\n\n".join(f"{column_name}: nan" for column_name in source_cols):
+            print('c bon')
+            llm_answer = ask_llm(prompt, concatenated_content, client)
+            print(f"QUERY:\n{prompt}\nCONTENT:\n{concatenated_content[:200]}...\n\nANSWER:\n{llm_answer}")
+            df.at[index, dest_col] = llm_answer
 
     df.to_excel(file_name, index=False)
     return file_name, df.head(5)
 
-def get_columns(file):
+
+def get_columns(file, progress=gr.Progress()):
     if file is not None:
         df = pd.read_excel(file)
-        columns = list(df.columns)
-        return gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns), gr.update(choices=columns + [""]), gr.update(choices=columns + ['[ALL]']), df.head(5)
+        return gr.update(choices=list(df.columns)), gr.update(choices=list(df.columns)), df.head(5)
     else:
-        return gr.update(choices=[]), gr.update(choices=[]),
+        return gr.update(choices=[]), gr.update(choices=[]), pd.DataFrame()
```
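In the row loop, the existing LLM call is now wrapped in a guard that rebuilds the `nan` template string and skips rows where every source column stringifies to `nan`, so empty rows no longer burn an API call (the `print('test')` and `print('c bon')` lines, the latter French for "it's good", look like leftover debugging output). A rough pandas equivalent of that guard, not part of the commit, with `row` and `source_cols` as in the loop:

```python
import pandas as pd

def all_sources_missing(row: pd.Series, source_cols: list[str]) -> bool:
    """Rough equivalent of the string comparison added in this commit.

    The patch matches only cells whose str() renders exactly as 'nan'
    (float NaN); isna() is slightly broader, also treating None and NaT
    as missing.
    """
    return row[source_cols].isna().all()
```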
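`get_columns` is also reworked: five dropdown updates collapse to two, the no-file branch returns an empty `DataFrame` instead of a truncated tuple, and the new `progress=gr.Progress()` parameter lets Gradio track progress for the call. A minimal sketch of how it could be wired into a Blocks app; the component names here are assumptions, not from this commit:

```python
import gradio as gr
from excel_chat import get_columns  # the function patched above

with gr.Blocks() as demo:
    file_box = gr.File(label="Excel file")
    source_dd = gr.Dropdown(label="Source column")
    dest_dd = gr.Dropdown(label="Destination column")
    preview = gr.Dataframe(label="Preview")

    # get_columns returns (choices update, choices update, preview frame),
    # matching these three outputs in order.
    file_box.change(get_columns, inputs=file_box,
                    outputs=[source_dd, dest_dd, preview])

demo.launch()
```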
|