baconnier committed
Commit 30c6ca6
1 Parent(s): 7089906

Update app.py

Files changed (1)
  1. app.py +70 -14
app.py CHANGED
@@ -116,8 +116,68 @@ class GradioInterface:
         position: relative;
     }
 
-    .container::before {
-        content: attr(data-title);
+    .title-container::before {
+        content: 'PROMPT++';
+        position: absolute;
+        top: -12px;
+        left: 20px;
+        background: var(--background-fill-primary);
+        padding: 0 10px;
+        color: var(--primary-500);
+        font-weight: bold;
+        font-size: 1.2em;
+    }
+
+    .input-container::before {
+        content: 'PROMPT REFINEMENT';
+        position: absolute;
+        top: -12px;
+        left: 20px;
+        background: var(--background-fill-primary);
+        padding: 0 10px;
+        color: var(--primary-500);
+        font-weight: bold;
+        font-size: 1.2em;
+    }
+
+    .analysis-container::before {
+        content: 'ANALYSIS & REFINEMENT';
+        position: absolute;
+        top: -12px;
+        left: 20px;
+        background: var(--background-fill-primary);
+        padding: 0 10px;
+        color: var(--primary-500);
+        font-weight: bold;
+        font-size: 1.2em;
+    }
+
+    .model-container::before {
+        content: 'MODEL APPLICATION';
+        position: absolute;
+        top: -12px;
+        left: 20px;
+        background: var(--background-fill-primary);
+        padding: 0 10px;
+        color: var(--primary-500);
+        font-weight: bold;
+        font-size: 1.2em;
+    }
+
+    .results-container::before {
+        content: 'RESULTS';
+        position: absolute;
+        top: -12px;
+        left: 20px;
+        background: var(--background-fill-primary);
+        padding: 0 10px;
+        color: var(--primary-500);
+        font-weight: bold;
+        font-size: 1.2em;
+    }
+
+    .examples-container::before {
+        content: 'EXAMPLES';
         position: absolute;
         top: -12px;
         left: 20px;
@@ -131,15 +191,13 @@ class GradioInterface:
 
         with gr.Blocks(css=custom_css) as self.interface:
             # Title Container
-            with gr.Column(elem_classes="container", elem_id="title-container") as title_container:
-                title_container.dataset["title"] = "PROMPT++"
+            with gr.Column(elem_classes=["container", "title-container"]):
                 gr.Markdown("# PROMPT++")
                 gr.Markdown("### Automating Prompt Engineering by Refining your Prompts")
                 gr.Markdown("Learn how to generate an improved version of your prompts. Enter a main idea for a prompt, choose a meta prompt, and the model will attempt to generate an improved version.")
 
             # Input Container
-            with gr.Column(elem_classes="container", elem_id="input-container") as input_container:
-                input_container.dataset["title"] = "PROMPT REFINEMENT"
+            with gr.Column(elem_classes=["container", "input-container"]):
                 gr.Markdown("## Refine Prompt")
                 with gr.Row():
                     prompt_text = gr.Textbox(label="Type the prompt (or let it empty to see metaprompt)")
@@ -154,8 +212,7 @@ class GradioInterface:
                 refine_button = gr.Button("Refine Prompt")
 
             # Analysis Container
-            with gr.Column(elem_classes="container", elem_id="analysis-container") as analysis_container:
-                analysis_container.dataset["title"] = "ANALYSIS & REFINEMENT"
+            with gr.Column(elem_classes=["container", "analysis-container"]):
                 with gr.Row():
                     gr.Markdown("### Initial prompt analysis")
                     with gr.Column():
@@ -175,8 +232,7 @@ class GradioInterface:
                 )
 
             # Model Application Container
-            with gr.Column(elem_classes="container", elem_id="model-container") as model_container:
-                model_container.dataset["title"] = "MODEL APPLICATION"
+            with gr.Column(elem_classes=["container", "model-container"]):
                 gr.Markdown("## See MetaPrompt Impact")
                 with gr.Row():
                     apply_model = gr.Dropdown(
@@ -195,8 +251,7 @@ class GradioInterface:
                 apply_button = gr.Button("Apply MetaPrompt")
 
             # Results Container
-            with gr.Column(elem_classes="container", elem_id="results-container") as results_container:
-                results_container.dataset["title"] = "RESULTS"
+            with gr.Column(elem_classes=["container", "results-container"]):
                 with gr.Tab("Original Prompt Output"):
                     original_output = gr.Markdown(label="Original Prompt Output")
                 with gr.Tab("Refined Prompt Output"):
@@ -209,8 +264,7 @@ class GradioInterface:
                 )
 
             # Examples Container
-            with gr.Column(elem_classes="container", elem_id="examples-container") as examples_container:
-                examples_container.dataset["title"] = "EXAMPLES"
+            with gr.Column(elem_classes=["container", "examples-container"]):
                 with gr.Accordion("Examples", open=True):
                     gr.Examples(
                         examples=[
@@ -228,6 +282,8 @@ class GradioInterface:
                         inputs=[prompt_text, meta_prompt_choice]
                     )
 
+        # Rest of the class methods remain the same...
+
     # Rest of the class methods remain the same
     def refine_prompt(self, prompt: str, meta_prompt_choice: str) -> tuple:
        input_data = PromptInput(text=prompt, meta_prompt_choice=meta_prompt_choice)
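
For context, the commit swaps the single .container::before { content: attr(data-title); } rule, which depended on setting a data-title attribute via title_container.dataset[...] (an attribute gr.Column does not expose), for one ::before rule per container class, selected through elem_classes. Below is a minimal sketch of that pattern, trimmed to a single container. It assumes a recent Gradio release; the demo variable, the launch guard, and the box styling on .container are illustrative additions, not taken from this commit, while the .title-container::before rule mirrors the one added in the diff.

import gradio as gr

# Illustrative sketch: .container box styling is assumed, not part of this commit;
# .title-container::before mirrors the rule added in the diff.
custom_css = """
.container {
    position: relative;
    border: 1px solid var(--primary-500);
    border-radius: 8px;
    padding: 20px;
    margin-top: 20px;
}
.title-container::before {
    content: 'PROMPT++';  /* fixed label instead of attr(data-title) */
    position: absolute;
    top: -12px;
    left: 20px;
    background: var(--background-fill-primary);
    padding: 0 10px;
    color: var(--primary-500);
    font-weight: bold;
    font-size: 1.2em;
}
"""

with gr.Blocks(css=custom_css) as demo:
    # The second class picks which ::before label this container receives.
    with gr.Column(elem_classes=["container", "title-container"]):
        gr.Markdown("# PROMPT++")

if __name__ == "__main__":
    demo.launch()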