Sg-at-srijan-us-kg committed on
Commit
96f13df
1 Parent(s): 95addb9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -8
app.py CHANGED
@@ -1,7 +1,7 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
5
 
6
  def respond(
7
  message,
@@ -12,24 +12,33 @@ def respond(
12
  top_p,
13
  file=None
14
  ):
15
- # If a file is uploaded, read its contents and append to the message
16
- if file:
17
- with open(file.name, 'r') as f:
18
- file_content = f.read()
19
- message = f"{file_content}\n\n{message}"
20
-
21
  messages = [{"role": "system", "content": system_message}]
22
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  for val in history:
24
  if val[0]:
25
  messages.append({"role": "user", "content": val[0]})
26
  if val[1]:
27
  messages.append({"role": "assistant", "content": val[1]})
28
 
 
29
  messages.append({"role": "user", "content": message})
30
 
31
  response = ""
32
 
 
33
  for message in client.chat_completion(
34
  messages,
35
  max_tokens=max_tokens,
@@ -60,4 +69,3 @@ demo = gr.ChatInterface(
60
 
61
  if __name__ == "__main__":
62
  demo.launch()
63
-
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
+ client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct")
5
 
6
  def respond(
7
  message,
 
12
  top_p,
13
  file=None
14
  ):
15
+ # Initialize the messages with the system message
 
 
 
 
 
16
  messages = [{"role": "system", "content": system_message}]
17
 
18
+ # Read file content if a file is uploaded
19
+ if file:
20
+ try:
21
+ with open(file.name, 'r') as f:
22
+ file_content = f.read()
23
+ print("File content:", file_content) # Debug print
24
+ message = f"{file_content}\n\n{message}" # Append file content to message
25
+ except Exception as e:
26
+ print("Error reading file:", e)
27
+ message = f"(Error reading file: {e})\n\n{message}"
28
+
29
+ # Append conversation history
30
  for val in history:
31
  if val[0]:
32
  messages.append({"role": "user", "content": val[0]})
33
  if val[1]:
34
  messages.append({"role": "assistant", "content": val[1]})
35
 
36
+ # Append the latest user message
37
  messages.append({"role": "user", "content": message})
38
 
39
  response = ""
40
 
41
+ # Stream response from the model
42
  for message in client.chat_completion(
43
  messages,
44
  max_tokens=max_tokens,
 
69
 
70
  if __name__ == "__main__":
71
  demo.launch()