yentinglin committed
Commit f5b207c • 1 parent: a0b0fa4
Update app.py
app.py CHANGED
@@ -68,7 +68,7 @@ LICENSE = """
Taiwan-LLaMa project acknowledges the efforts of the [Meta LLaMa team](https://github.com/facebookresearch/llama) and [Vicuna team](https://github.com/lm-sys/FastChat) in democratizing large language models.
"""

-DEFAULT_SYSTEM_PROMPT = "你是人工智慧助理,以下是用戶和人工智能助理之間的對話。你要對用戶的問題提供有用、安全、詳細和禮貌的回答。
+DEFAULT_SYSTEM_PROMPT = "你是人工智慧助理,以下是用戶和人工智能助理之間的對話。你要對用戶的問題提供有用、安全、詳細和禮貌的回答。"

endpoint_url = os.environ.get("ENDPOINT_URL", "http://127.0.0.1:8080")
MAX_MAX_NEW_TOKENS = 4096
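On the old side, the DEFAULT_SYSTEM_PROMPT string literal was missing its closing quote; an unterminated string is a SyntaxError and would stop app.py from loading at all, and the new side simply adds the quote. The prompt translates roughly to: "You are an AI assistant. Below is a conversation between a user and an AI assistant. You should give the user helpful, safe, detailed, and polite answers." As a minimal sketch of how such a constant is usually consumed (the helper below is hypothetical and not part of this commit), the prompt becomes the leading system message of every request:

# Hypothetical helper, not in app.py: shows the usual way a system prompt constant
# is combined with chat history into an OpenAI-style message list.
def build_messages(history, user_message, system_prompt=DEFAULT_SYSTEM_PROMPT):
    messages = [{"role": "system", "content": system_prompt}]
    for user_turn, assistant_turn in history:  # history as (user, assistant) pairs
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": user_message})
    return messages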
@@ -151,6 +151,7 @@ with gr.Blocks() as demo:
top_p=top_p,
n=1,
stream=True,
+stop=["<|eot_id|>"],  # add stop token
)

for chunk in response:
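The second hunk adds a stop sequence to the streaming completion request. <|eot_id|> is the end-of-turn token used by Llama-3-family chat templates, so passing it as a stop string makes the server cut generation off at the end of the assistant's turn instead of letting it run on. A minimal sketch of what such a streaming call can look like, assuming an OpenAI-compatible server behind ENDPOINT_URL; the client library, model name, and sampling values below are placeholders rather than the Space's actual code:

# Sketch only: assumes an OpenAI-compatible chat endpoint behind ENDPOINT_URL.
# The real app.py may use a different client library or route.
import os
from openai import OpenAI

endpoint_url = os.environ.get("ENDPOINT_URL", "http://127.0.0.1:8080")
client = OpenAI(base_url=f"{endpoint_url}/v1", api_key="EMPTY")  # placeholder key

response = client.chat.completions.create(
    model="taiwan-llm",                       # placeholder model name
    messages=[{"role": "user", "content": "你好"}],  # "Hello"
    max_tokens=1024,
    temperature=0.7,
    top_p=0.9,
    n=1,
    stream=True,
    stop=["<|eot_id|>"],                      # halt at the end-of-turn token
)

reply = ""
for chunk in response:
    reply += chunk.choices[0].delta.content or ""  # accumulate streamed tokens
print(reply)

If the backend already treats <|eot_id|> as an end-of-sequence token, the extra stop string is harmless; it guarantees the same cutoff on servers that would otherwise stream the raw token through to the UI.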