mikemin027 committed on
Commit
5540630
1 Parent(s): 7696148

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -23
app.py CHANGED
@@ -2,9 +2,7 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from transformers import pipeline
4
 
5
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
-
7
-
8
  def respond(
9
  message,
10
  history: list[tuple[str, str]],
@@ -13,30 +11,30 @@ def respond(
13
  temperature,
14
  top_p,
15
  ):
 
16
  messages = [
17
- {"role": "user", "content": "Who are you?"},
18
- ]
19
- pipe = pipeline("text-generation", model="codefuse-ai/CodeFuse-DeepSeek-33B")
20
- pipe(messages)
21
-
22
- response = ""
23
-
 
 
 
24
  for message in client.chat_completion(
25
- messages,
26
- max_tokens=max_tokens,
27
- stream=True,
28
- temperature=temperature,
29
- top_p=top_p,
30
- ):
31
  token = message.choices[0].delta.content
32
-
33
  response += token
34
  yield response
35
 
36
-
37
- """
38
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
39
- """
40
  demo = gr.ChatInterface(
41
  respond,
42
  additional_inputs=[
@@ -53,6 +51,6 @@ demo = gr.ChatInterface(
53
  ],
54
  )
55
 
56
-
57
  if __name__ == "__main__":
58
- demo.launch()
 
2
  from huggingface_hub import InferenceClient
3
  from transformers import pipeline
4
 
5
+ # Define the respond function
 
 
6
  def respond(
7
  message,
8
  history: list[tuple[str, str]],
 
11
  temperature,
12
  top_p,
13
  ):
14
+ # Define the initial message for the chat
15
  messages = [
16
+ {"role": "user", "content": message},
17
+ ]
18
+
19
+ # Create a pipeline for text generation
20
+ pipe = pipeline("text-generation", model="codefuse-ai/CodeFuse-DeepSeek-33B")
21
+ pipe(messages)
22
+
23
+ response = ""
24
+
25
+ # Use the InferenceClient to get responses
26
  for message in client.chat_completion(
27
+ messages,
28
+ max_tokens=max_tokens,
29
+ stream=True,
30
+ temperature=temperature,
31
+ top_p=top_p,
32
+ ):
33
  token = message.choices[0].delta.content
 
34
  response += token
35
  yield response
36
 
37
+ # Setup Gradio interface
 
 
 
38
  demo = gr.ChatInterface(
39
  respond,
40
  additional_inputs=[
 
51
  ],
52
  )
53
 
54
+ # Launch the Gradio app
55
  if __name__ == "__main__":
56
+ demo.launch()