Uglevod7 committed on
Commit
225f2ef
1 Parent(s): 2beedbe
Files changed (1)
  1. app.py +9 -2
app.py CHANGED
@@ -1,3 +1,4 @@
+import streamlit as st
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 
 model_name_or_path = "TheBloke/Unholy-v1-12L-13B-GPTQ"
@@ -6,7 +7,9 @@ model_name_or_path = "TheBloke/Unholy-v1-12L-13B-GPTQ"
 model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
                                              device_map="auto",
                                              trust_remote_code=False,
-                                             revision="main")
+                                             revision="main",
+                                             disable_exllama=True
+                                             )
 
 tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
 
@@ -41,4 +44,8 @@ pipe = pipeline(
     repetition_penalty=1.1
 )
 
-print(pipe(prompt_template)[0]['generated_text'])
+print(pipe(prompt_template)[0]['generated_text'])
+
+#t=prompt_template)[0]['generated_text']
+
+st.json(pipe(prompt_template))
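
For context, a minimal sketch of what app.py roughly does after this commit. The prompt_template definition and most of the pipeline() arguments sit outside the hunks shown above, so the placeholder prompt and generation settings below are assumptions for illustration only, not the committed values.

# Illustrative sketch only: the real prompt_template and most pipeline settings
# are elided from this diff, so placeholders marked "assumed" are used instead.
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_name_or_path = "TheBloke/Unholy-v1-12L-13B-GPTQ"

# disable_exllama=True is forwarded to the GPTQ loading path to skip the
# ExLlama CUDA kernels; how this keyword is accepted varies across
# transformers/auto-gptq versions.
model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
                                             device_map="auto",
                                             trust_remote_code=False,
                                             revision="main",
                                             disable_exllama=True)

tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)

# Assumed placeholder; the committed prompt_template is not shown in the diff.
prompt_template = "### Instruction: Write a short greeting.\n### Response:"

# Generation settings other than repetition_penalty are assumed.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=128,
    repetition_penalty=1.1
)

result = pipe(prompt_template)        # list of dicts: [{"generated_text": ...}]
print(result[0]['generated_text'])    # log the completion to the server console
st.json(result)                       # show the raw pipeline output in the Streamlit page

Launched with `streamlit run app.py`, st.json renders the pipeline's raw list-of-dicts output as an expandable JSON widget, which confirms the generation ran without building any further UI around it.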