research14 committed
Commit 1d66b8b
Parent(s): c6666cc
app.py
CHANGED
@@ -50,7 +50,7 @@ for i, j in zip(ents, ents_prompt):
     print(i, j)
 
 model_mapping = {
-    'gpt3.5': '
+    'gpt3.5': 'gpt2',
     #'vicuna-7b': 'lmsys/vicuna-7b-v1.3',
     #'llama-7b': './llama/hf/7B',
 }
@@ -88,7 +88,7 @@ with open('demonstration_3_42_parse.txt', 'r') as f:
 theme = gr.themes.Soft()
 
 # issue get request for gpt 3.5
-gpt_pipeline = pipeline(task="text2text-generation", model="
+gpt_pipeline = pipeline(task="text2text-generation", model="gpt2")
 #vicuna7b_pipeline = pipeline(task="text2text-generation", model="lmsys/vicuna-7b-v1.3")
 #llama7b_pipeline = pipeline(task="text2text-generation", model="./llama/hf/7B")
 
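For context, a minimal sketch of how the swapped-in gpt2 checkpoint could be exercised locally. This is an assumption about usage, not code from the commit: the prompt string is hypothetical, and it uses the "text-generation" task because gpt2 is a decoder-only causal LM, whereas the "text2text-generation" task in the diff normally expects an encoder-decoder model such as T5.

from transformers import pipeline

# Sketch only (not part of the commit): gpt2 is a decoder-only model,
# so transformers maps it to the "text-generation" pipeline task.
gpt_pipeline = pipeline(task="text-generation", model="gpt2")

# Hypothetical prompt, standing in for whatever prompt app.py actually builds.
prompt = "POS tag the following sentence: The quick brown fox jumps over the lazy dog."
result = gpt_pipeline(prompt, max_new_tokens=32, num_return_sequences=1)
print(result[0]["generated_text"])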