Sg-at-srijan-us-kg committed on
Commit
60dc05f
1 Parent(s): e84f2ca

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -36
app.py CHANGED
@@ -4,35 +4,40 @@ from huggingface_hub import InferenceClient
4
 
5
  client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct")
6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  def respond(
8
  message,
9
  history: list[tuple[str, str]],
10
  system_message,
11
  max_tokens,
12
  temperature,
13
- top_p,
14
- file=None
15
  ):
16
  # Initialize the messages with the system message
17
  messages = [{"role": "system", "content": system_message}]
18
 
19
- # Handle file upload
20
- if file is not None:
21
- try:
22
- # Save the uploaded file to a temporary file
23
- with tempfile.NamedTemporaryFile(delete=False, mode="wb") as temp_file:
24
- temp_file.write(file.read())
25
- temp_file_path = temp_file.name # Store the file path
26
-
27
- # Read the content from the saved file
28
- with open(temp_file_path, "r", encoding="utf-8") as f:
29
- file_content = f.read()
30
- print("File content:", file_content) # Debug print
31
- message = f"{file_content}\n\n{message}" # Append file content to message
32
-
33
- except Exception as e:
34
- print("Error reading file:", e)
35
- message = f"(Error reading file: {e})\n\n{message}"
36
 
37
  # Append conversation history
38
  for val in history:
@@ -58,23 +63,36 @@ def respond(
58
  response += token
59
  yield response
60
 
61
- demo = gr.ChatInterface(
62
- respond,
63
- additional_inputs=[
64
- gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
65
- gr.Slider(minimum=1, maximum=32000, value=2048, step=1, label="Max new tokens"),
66
- gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
67
- gr.Slider(
68
- minimum=0.1,
69
- maximum=1.0,
70
- value=0.95,
71
- step=0.05,
72
- label="Top-p (nucleus sampling)"
73
- ),
74
- gr.File(label="Upload a text file", file_types=[".txt"])
75
- ],
76
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
 
78
  if __name__ == "__main__":
79
  demo.launch()
80
-
 
4
 
5
  client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct")
6
 
7
# Module-level cache holding the text of the most recent upload;
# respond() reads this to prepend file content to the user's message.
uploaded_file_content = ""

# A function to handle file upload and store content
def handle_file_upload(file):
    """Cache the content of an uploaded text file.

    Decodes the upload as UTF-8 and stores it in the module-level
    ``uploaded_file_content`` so the chat handler can prepend it to the
    next message.

    Args:
        file: Upload object from the Gradio ``File`` component; must
            expose ``read()`` returning bytes.

    Returns:
        str: Status message shown in the "Upload Status" textbox.
    """
    global uploaded_file_content
    try:
        # Decode in memory instead of round-tripping through a
        # NamedTemporaryFile(delete=False) that was never removed —
        # the original leaked one temp file on disk per upload.
        uploaded_file_content = file.read().decode("utf-8")
        return "File uploaded successfully!"
    except Exception as e:
        # Best-effort: surface the problem in the status box rather
        # than crash the UI callback.
        return f"Error uploading file: {e}"
24
+
25
+ # The main response function
26
  def respond(
27
  message,
28
  history: list[tuple[str, str]],
29
  system_message,
30
  max_tokens,
31
  temperature,
32
+ top_p
 
33
  ):
34
  # Initialize the messages with the system message
35
  messages = [{"role": "system", "content": system_message}]
36
 
37
+ # Add uploaded file content to the message
38
+ global uploaded_file_content
39
+ if uploaded_file_content:
40
+ message = f"{uploaded_file_content}\n\n{message}"
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
  # Append conversation history
43
  for val in history:
 
63
  response += token
64
  yield response
65
 
66
# Define the file upload components up front; they are render()ed inside
# the Blocks layout below.
file_upload = gr.File(label="Upload a text file", file_types=[".txt"])
upload_button = gr.Button("Upload File")
upload_output = gr.Textbox(label="Upload Status", interactive=False)

# Chat interface with a dedicated file-upload row above it.
demo = gr.Blocks()

with demo:
    gr.Markdown("## Chat Interface with File Upload")
    with gr.Row():
        file_upload.render()
        # Fix: the button was created outside the Blocks context but was
        # never render()ed inside it, so it did not appear in the UI and
        # the upload handler could never be triggered.
        upload_button.render()
        upload_output.render()
    # Wire the button to the upload handler; the status string it returns
    # is shown in the read-only textbox.
    upload_button.click(handle_file_upload, inputs=file_upload, outputs=upload_output)

    gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=32000, value=2048, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
        ],
    )

if __name__ == "__main__":
    demo.launch()