Update app.py
app.py CHANGED
@@ -4,10 +4,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 model_name_or_path = "TheBloke/Unholy-v1-12L-13B-GPTQ"
 # To use a different branch, change revision
 # For example: revision="main"
 model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
-                                             device_map="auto",
+                                             device_map="cuda:0",
                                              trust_remote_code=False,
                                              revision="main"
 
 )
 
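For context, this is roughly how the loading step reads after the change. A minimal sketch, not the full app.py: the install requirements and the tokenizer line are assumptions based on the usual setup for TheBloke's GPTQ releases, not part of this diff.

```python
# Assumed prerequisites for GPTQ loading (not shown in this diff):
#   pip install transformers optimum auto-gptq
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "TheBloke/Unholy-v1-12L-13B-GPTQ"

# device_map="cuda:0" pins the whole model to the first GPU instead of
# letting accelerate place layers automatically ("auto").
model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
                                             device_map="cuda:0",
                                             trust_remote_code=False,
                                             revision="main")

# Tokenizer loading is assumed here; app.py defines it outside this hunk.
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
```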
@@ -41,7 +41,7 @@ pipe = pipeline(
     temperature=0.7,
     top_p=0.95,
     top_k=40,
-    repetition_penalty=1.1
+    repetition_penalty=1.1
 )
 
 print(pipe(prompt_template)[0]['generated_text'])
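The second hunk shows only the tail of the `pipeline(...)` call. A sketch of the full call it belongs to, with the arguments above line 41 (task name, max_new_tokens, do_sample) and the prompt_template value filled in as assumptions:

```python
from transformers import pipeline

# Arguments before line 41 are outside this diff; the task name,
# max_new_tokens and do_sample values here are assumptions.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=512,
    do_sample=True,  # temperature/top_p/top_k only take effect when sampling
    temperature=0.7,
    top_p=0.95,
    top_k=40,
    repetition_penalty=1.1,
)

# prompt_template is defined earlier in app.py; this value is a placeholder.
prompt_template = "USER: Tell me about AI\nASSISTANT:"
print(pipe(prompt_template)[0]['generated_text'])
```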