eliebak HF staff committed on
Commit
49dac5b
β€’
1 Parent(s): 55e69ee

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -49
app.py CHANGED
@@ -6,21 +6,13 @@ import torch
6
  import spaces
7
 
8
  # Initialize the client with your model
9
- client = InferenceClient("karpathy/gpt2_1558M_final2_hf") # Replace with your model's name or endpoint
10
-
11
- default_system = 'You are a helpful assistant'
12
 
13
  @spaces.GPU
14
- def respond(message, history, system_message, max_tokens, temperature, top_p):
15
- # Combine system message, history, and new message
16
- full_prompt = f"{system_message}\n\n"
17
- for user, assistant in history:
18
- full_prompt += f"Human: {user}\nAssistant: {assistant}\n"
19
- full_prompt += f"Human: {message}\nAssistant:"
20
-
21
  response = ""
22
  for chunk in client.text_generation(
23
- full_prompt,
24
  max_new_tokens=max_tokens,
25
  stream=True,
26
  temperature=temperature,
@@ -32,51 +24,28 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
32
  response += chunk.token.text
33
  elif hasattr(chunk, 'generated_text'):
34
  response += chunk.generated_text
35
- yield history + [(message, response)]
36
 
37
- # If the response is empty, yield a default message
38
  if not response:
39
- yield history + [(message, "I apologize, but I couldn't generate a response.")]
40
-
41
def clear_session():
    """Clear the chat UI: blank out the message box and empty the chat history."""
    empty_message, empty_history = "", []
    return empty_message, empty_history
43
-
44
def modify_system_session(system):
    """Apply a new system prompt and reset the conversation.

    Falls back to the module-level `default_system` when the given prompt is
    empty/falsy. Returns (state value, textbox value, cleared history) — the
    same prompt is echoed twice so both the hidden state and the visible
    textbox stay in sync.
    """
    chosen = system if system else default_system
    return chosen, chosen, []
48
 
49
def use_example(example):
    """Identity pass-through: hand the selected example text back to Gradio unchanged."""
    result = example
    return result
51
-
52
def set_unicorn_example():
    """Return the canned unicorn-discovery prompt (module global) for the example button."""
    return unicorn_example
54
-
55
def set_time_travel_example():
    """Return the canned grandfather-paradox prompt (module global) for the example button."""
    return time_travel_example
57
 
58
  # Define example prompts
59
  unicorn_example = "In a shocking finding, scientist discovered a herd of unicorns living in a remote, previously unexplored valley, in the Andes Mountains. Even more surprising to the researchers was the fact that the unicorns spoke perfect English."
60
  time_travel_example = "Explain the grandfather paradox in time travel and propose a potential resolution."
61
 
62
  with gr.Blocks() as demo:
63
- gr.Markdown("<h1 style='text-align: center;'>LLM.C 1.5B Chat Demo (GPT-2 1.5B)</h1>")
64
-
65
- with gr.Row():
66
- with gr.Column(scale=3):
67
- system_input = gr.Textbox(value=default_system, lines=1, label='System Prompt')
68
- with gr.Column(scale=1):
69
- modify_system = gr.Button("πŸ› οΈ Set system prompt and clear history")
70
 
71
- system_state = gr.Textbox(value=default_system, visible=False)
72
- chatbot = gr.Chatbot(label='LLM.C Chat')
73
- message = gr.Textbox(lines=1, label='Your message')
74
 
75
  with gr.Row():
76
- clear_history = gr.Button("🧹 Clear history")
77
- submit = gr.Button("πŸš€ Send")
78
 
79
- # New section for example prompts
80
  gr.Markdown("### Example prompts")
81
  with gr.Row():
82
  example1 = gr.Button("πŸ¦„ Unicorn Discovery")
@@ -88,12 +57,10 @@ with gr.Blocks() as demo:
88
  top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (Nucleus Sampling)")
89
 
90
  # Set up event handlers
91
- message.submit(respond, inputs=[message, chatbot, system_state, max_tokens, temperature, top_p], outputs=[chatbot])
92
- submit.click(respond, inputs=[message, chatbot, system_state, max_tokens, temperature, top_p], outputs=[chatbot])
93
- clear_history.click(fn=clear_session, inputs=[], outputs=[message, chatbot])
94
- modify_system.click(fn=modify_system_session, inputs=[system_input], outputs=[system_state, system_input, chatbot])
95
- example1.click(fn=set_unicorn_example, inputs=[], outputs=[message])
96
- example2.click(fn=set_time_travel_example, inputs=[], outputs=[message])
97
 
98
  gr.Markdown(
99
  """
 
6
  import spaces
7
 
8
  # Initialize the client with your model
9
+ client = InferenceClient("karpathy/gpt2_1558M_final2_hf")
 
 
10
 
11
  @spaces.GPU
12
+ def generate_text(prompt, max_tokens, temperature, top_p):
 
 
 
 
 
 
13
  response = ""
14
  for chunk in client.text_generation(
15
+ prompt,
16
  max_new_tokens=max_tokens,
17
  stream=True,
18
  temperature=temperature,
 
24
  response += chunk.token.text
25
  elif hasattr(chunk, 'generated_text'):
26
  response += chunk.generated_text
27
+ yield response
28
 
 
29
  if not response:
30
+ yield "I apologize, but I couldn't generate a response."
 
 
 
 
 
 
 
 
31
 
32
def clear_input():
    """Reset the prompt textbox: always yields an empty string for Gradio to display."""
    cleared = ""
    return cleared
 
 
 
 
 
 
34
 
35
  # Define example prompts
36
  unicorn_example = "In a shocking finding, scientist discovered a herd of unicorns living in a remote, previously unexplored valley, in the Andes Mountains. Even more surprising to the researchers was the fact that the unicorns spoke perfect English."
37
  time_travel_example = "Explain the grandfather paradox in time travel and propose a potential resolution."
38
 
39
  with gr.Blocks() as demo:
40
+ gr.Markdown("<h1 style='text-align: center;'>LLM.C 1.5B Demo</h1>")
 
 
 
 
 
 
41
 
42
+ prompt = gr.Textbox(lines=3, label='Enter your prompt')
43
+ output = gr.Textbox(lines=10, label='Generated text')
 
44
 
45
  with gr.Row():
46
+ clear_button = gr.Button("🧹 Clear input")
47
+ submit = gr.Button("πŸš€ Generate")
48
 
 
49
  gr.Markdown("### Example prompts")
50
  with gr.Row():
51
  example1 = gr.Button("πŸ¦„ Unicorn Discovery")
 
57
  top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (Nucleus Sampling)")
58
 
59
  # Set up event handlers
60
+ submit.click(generate_text, inputs=[prompt, max_tokens, temperature, top_p], outputs=output)
61
+ clear_button.click(clear_input, inputs=[], outputs=prompt)
62
+ example1.click(lambda: unicorn_example, inputs=[], outputs=prompt)
63
+ example2.click(lambda: time_travel_example, inputs=[], outputs=prompt)
 
 
64
 
65
  gr.Markdown(
66
  """