alfredplpl
committed on
Commit
•
2c778fe
1
Parent(s):
566c94b
Update app.py
Browse files
app.py
CHANGED
@@ -24,7 +24,7 @@ Built with Meta Llama 3
|
|
24 |
|
25 |
PLACEHOLDER = """
|
26 |
<div style="padding: 30px; text-align: center; display: flex; flex-direction: column; align-items: center;">
|
27 |
-
<h1 style="font-size: 28px; margin-bottom: 2px; opacity: 0.55;">
|
28 |
<p style="font-size: 18px; margin-bottom: 2px; opacity: 0.65;">なんでもきいてね</p>
|
29 |
</div>
|
30 |
"""
|
@@ -81,9 +81,9 @@ def chat_llama3_8b(message: str,
|
|
81 |
do_sample=True,
|
82 |
temperature=temperature,
|
83 |
top_p=0.95,
|
84 |
-
repetition_penalty=1.1
|
85 |
-
eos_token_id=terminators,
|
86 |
)
|
|
|
87 |
# This will enforce greedy generation (do_sample=False) when the temperature is passed 0, avoiding the crash.
|
88 |
if temperature == 0:
|
89 |
generate_kwargs['do_sample'] = False
|
|
|
24 |
|
25 |
PLACEHOLDER = """
|
26 |
<div style="padding: 30px; text-align: center; display: flex; flex-direction: column; align-items: center;">
|
27 |
+
<h1 style="font-size: 28px; margin-bottom: 2px; opacity: 0.55;">LLM-jp-3-13B</h1>
|
28 |
<p style="font-size: 18px; margin-bottom: 2px; opacity: 0.65;">なんでもきいてね</p>
|
29 |
</div>
|
30 |
"""
|
|
|
81 |
do_sample=True,
|
82 |
temperature=temperature,
|
83 |
top_p=0.95,
|
84 |
+
repetition_penalty=1.1
|
|
|
85 |
)
|
86 |
+
|
87 |
# This will enforce greedy generation (do_sample=False) when the temperature is passed 0, avoiding the crash.
|
88 |
if temperature == 0:
|
89 |
generate_kwargs['do_sample'] = False
|