Ali-C137 committed on
Commit
7ba5f92
•
1 Parent(s): 7b8756a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -8
app.py CHANGED
@@ -6,7 +6,7 @@ from threading import Thread
6
  import torch
7
  import spaces
8
  import gradio as gr
9
- from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextIteratorStreamer
10
 
11
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
12
 
@@ -72,7 +72,7 @@ quantization_config = BitsAndBytesConfig(
72
  bnb_4bit_compute_dtype=torch.bfloat16
73
  )
74
  tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
75
- model = AutoModelForCausalLM.from_pretrained(
76
  MODEL_ID,
77
  device_map="auto",
78
  quantization_config=quantization_config,
@@ -85,12 +85,11 @@ gr.ChatInterface(
85
  title=EMOJI + " " + MODEL_NAME,
86
  description=DESCRIPTION,
87
  examples=[
88
- ["Can you solve the equation 2x + 3 = 11 for x?"],
89
- ["Write an epic poem about Ancient Rome."],
90
- ["Who was the first person to walk on the Moon?"],
91
- ["Use a list comprehension to create a list of squares for numbers from 1 to 10."],
92
- ["Recommend some popular science fiction books."],
93
- ["Can you write a short story about a time-traveling detective?"]
94
  ],
95
  additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False),
96
  additional_inputs=[
 
6
  import torch
7
  import spaces
8
  import gradio as gr
9
+ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextIteratorStreamer, AutoModelForSeq2SeqLM
10
 
11
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
12
 
 
72
  bnb_4bit_compute_dtype=torch.bfloat16
73
  )
74
  tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
75
+ model = AutoModelForSeq2SeqLM.from_pretrained(
76
  MODEL_ID,
77
  device_map="auto",
78
  quantization_config=quantization_config,
 
85
  title=EMOJI + " " + MODEL_NAME,
86
  description=DESCRIPTION,
87
  examples=[
88
+ ["هل يمكنك حل المعادلة 2x + 3 = 11 للمتغير x؟"],
89
+ ["اكتب قصيدة ملحمية عن مدينة القدس الشريفة."],
90
+ ["من كان أول شخص يمشي على سطح القمر؟"],
91
+ ["أوصف ببعض كتب الخيال العلمي الشهيرة."],
92
+ ["هل يمكنك كتابة قصة قصيرة عن محقق يسافر عبر الزمن؟"],
 
93
  ],
94
  additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False),
95
  additional_inputs=[