Turkunov Y committed on
Commit
79a28b0
1 Parent(s): b746c52

Removed bitsandbytes

Files changed (2)
  1. app.py +2 -2
  2. requirements.txt +1 -3
app.py CHANGED
@@ -1,6 +1,6 @@
  import gradio as gr
  from textPreprocessing import text2prompt
- from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
+ from transformers import AutoTokenizer, AutoModelForCausalLM
  import torch
 
  """
@@ -30,7 +30,7 @@ def predict(input_text, t, m):
  - Instruct-based model
  """
  prompt = text2prompt(input_text)
- inputs = tokenizer(prompt, return_tensors="np")
+ inputs = tokenizer(prompt, return_tensors="pt")
  generate_ids = model.generate(inputs.input_ids, max_new_tokens=128)
  answer = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
  return answer.replace(prompt, "")
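
This change drops the BitsAndBytesConfig import and switches the tokenizer to PyTorch tensors, which implies the model is now loaded without bitsandbytes quantization. A minimal sketch of what the corresponding loading code could look like after this commit (the checkpoint id and dtype choice are placeholders, not part of the diff):

# Sketch only: model_id and dtype below are assumptions, not from this repo.
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

model_id = "some-org/some-instruct-model"  # hypothetical checkpoint

tokenizer = AutoTokenizer.from_pretrained(model_id)
# No BitsAndBytesConfig / load_in_8bit here: the model loads unquantized.
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16)

inputs = tokenizer("example prompt", return_tensors="pt")  # "pt" matches the change above
generate_ids = model.generate(inputs.input_ids, max_new_tokens=128)
print(tokenizer.batch_decode(generate_ids, skip_special_tokens=True)[0])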
requirements.txt CHANGED
@@ -1,4 +1,2 @@
  transformers
- torch
- bitsandbytes
- accelerate
+ torch