Update app.py
Browse files
app.py
CHANGED
@@ -62,11 +62,11 @@ def predict(text,
|
|
62 |
x = x[:x.index("[|AI|]")].strip()
|
63 |
x = x.strip()
|
64 |
a, b= [[y[0],convert_to_markdown(y[1])] for y in history]+[[text, convert_to_markdown(x)]],history + [[text,x]]
|
65 |
-
yield a, b, "
|
66 |
if shared_state.interrupted:
|
67 |
shared_state.recover()
|
68 |
try:
|
69 |
-
yield a, b, "Stop:
|
70 |
return
|
71 |
except:
|
72 |
pass
|
@@ -75,7 +75,7 @@ def predict(text,
|
|
75 |
torch.cuda.empty_cache()
|
76 |
|
77 |
try:
|
78 |
-
yield a,b,"
|
79 |
except:
|
80 |
pass
|
81 |
|
@@ -87,13 +87,11 @@ def reset_chat():
|
|
87 |
|
88 |
|
89 |
##########################################################
|
90 |
-
#Übersetzungs-KI nutzen
|
91 |
def translate():
|
92 |
-
return "
|
93 |
|
94 |
-
#Programmcode-KI
|
95 |
def coding():
|
96 |
-
return "
|
97 |
|
98 |
#######################################################################
|
99 |
#Darstellung mit Gradio
|
@@ -104,34 +102,34 @@ with open("custom.css", "r", encoding="utf-8") as f:
|
|
104 |
with gr.Blocks(theme=small_and_beautiful_theme) as demo:
|
105 |
history = gr.State([])
|
106 |
user_question = gr.State("")
|
107 |
-
gr.Markdown("Scegli cosa vuoi provare")
|
108 |
with gr.Tabs():
|
109 |
with gr.TabItem("Chat"):
|
110 |
with gr.Row():
|
111 |
gr.HTML(title)
|
112 |
-
status_display = gr.Markdown("
|
113 |
gr.Markdown(description_top)
|
114 |
with gr.Row(scale=1).style(equal_height=True):
|
115 |
with gr.Column(scale=5):
|
116 |
with gr.Row(scale=1):
|
117 |
-
chatbotGr = gr.Chatbot(elem_id="
|
118 |
with gr.Row(scale=1):
|
119 |
with gr.Column(scale=12):
|
120 |
user_input = gr.Textbox(
|
121 |
-
show_label=False, placeholder="
|
122 |
).style(container=False)
|
123 |
with gr.Column(min_width=100, scale=1):
|
124 |
-
submitBtn = gr.Button("
|
125 |
with gr.Column(min_width=100, scale=1):
|
126 |
-
cancelBtn = gr.Button("
|
127 |
with gr.Row(scale=1):
|
128 |
emptyBtn = gr.Button(
|
129 |
-
"🧹
|
130 |
)
|
131 |
with gr.Column():
|
132 |
with gr.Column(min_width=50, scale=1):
|
133 |
-
with gr.Tab(label="
|
134 |
-
gr.Markdown("#
|
135 |
top_p = gr.Slider(
|
136 |
minimum=-0,
|
137 |
maximum=1.0,
|
@@ -146,7 +144,7 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
|
|
146 |
value=1,
|
147 |
step=0.1,
|
148 |
interactive=True,
|
149 |
-
label="
|
150 |
)
|
151 |
max_length_tokens = gr.Slider(
|
152 |
minimum=0,
|
@@ -154,7 +152,7 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
|
|
154 |
value=512,
|
155 |
step=8,
|
156 |
interactive=True,
|
157 |
-
label="
|
158 |
)
|
159 |
max_context_length_tokens = gr.Slider(
|
160 |
minimum=0,
|
@@ -162,7 +160,7 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
|
|
162 |
value=2048,
|
163 |
step=128,
|
164 |
interactive=True,
|
165 |
-
label="
|
166 |
)
|
167 |
gr.Markdown(description)
|
168 |
|
|
|
62 |
x = x[:x.index("[|AI|]")].strip()
|
63 |
x = x.strip()
|
64 |
a, b= [[y[0],convert_to_markdown(y[1])] for y in history]+[[text, convert_to_markdown(x)]],history + [[text,x]]
|
65 |
+
yield a, b, "Sto elaborando ..."
|
66 |
if shared_state.interrupted:
|
67 |
shared_state.recover()
|
68 |
try:
|
69 |
+
yield a, b, "Stop: OK"
|
70 |
return
|
71 |
except:
|
72 |
pass
|
|
|
75 |
torch.cuda.empty_cache()
|
76 |
|
77 |
try:
|
78 |
+
yield a,b,"Generazione: OK"
|
79 |
except:
|
80 |
pass
|
81 |
|
|
|
87 |
|
88 |
|
89 |
##########################################################
|
|
|
90 |
def translate():
|
91 |
+
return "In costruzione"
|
92 |
|
|
|
93 |
def coding():
|
94 |
+
return "In costruzione"
|
95 |
|
96 |
#######################################################################
|
97 |
#Darstellung mit Gradio
|
|
|
102 |
with gr.Blocks(theme=small_and_beautiful_theme) as demo:
|
103 |
history = gr.State([])
|
104 |
user_question = gr.State("")
|
105 |
+
gr.Markdown("Scegli cosa vuoi provare:")
|
106 |
with gr.Tabs():
|
107 |
with gr.TabItem("Chat"):
|
108 |
with gr.Row():
|
109 |
gr.HTML(title)
|
110 |
+
status_display = gr.Markdown("OK", elem_id="status_display")
|
111 |
gr.Markdown(description_top)
|
112 |
with gr.Row(scale=1).style(equal_height=True):
|
113 |
with gr.Column(scale=5):
|
114 |
with gr.Row(scale=1):
|
115 |
+
chatbotGr = gr.Chatbot(elem_id="Chat").style(height="100%")
|
116 |
with gr.Row(scale=1):
|
117 |
with gr.Column(scale=12):
|
118 |
user_input = gr.Textbox(
|
119 |
+
show_label=False, placeholder="Inserisci il tuo testo / domanda"
|
120 |
).style(container=False)
|
121 |
with gr.Column(min_width=100, scale=1):
|
122 |
+
submitBtn = gr.Button("Invia")
|
123 |
with gr.Column(min_width=100, scale=1):
|
124 |
+
cancelBtn = gr.Button("Cancella")
|
125 |
with gr.Row(scale=1):
|
126 |
emptyBtn = gr.Button(
|
127 |
+
"🧹 Nuova Chat",
|
128 |
)
|
129 |
with gr.Column():
|
130 |
with gr.Column(min_width=50, scale=1):
|
131 |
+
with gr.Tab(label="Parametri del modello"):
|
132 |
+
gr.Markdown("# Parametri")
|
133 |
top_p = gr.Slider(
|
134 |
minimum=-0,
|
135 |
maximum=1.0,
|
|
|
144 |
value=1,
|
145 |
step=0.1,
|
146 |
interactive=True,
|
147 |
+
label="Temperatura",
|
148 |
)
|
149 |
max_length_tokens = gr.Slider(
|
150 |
minimum=0,
|
|
|
152 |
value=512,
|
153 |
step=8,
|
154 |
interactive=True,
|
155 |
+
label="Numero massimo di parole",
|
156 |
)
|
157 |
max_context_length_tokens = gr.Slider(
|
158 |
minimum=0,
|
|
|
160 |
value=2048,
|
161 |
step=128,
|
162 |
interactive=True,
|
163 |
+
label="Numero massimo di parole memorizzate",
|
164 |
)
|
165 |
gr.Markdown(description)
|
166 |
|