ancerlop committed
Commit 2892c38
1 Parent(s): 5f7f65d

Create app.py

Files changed (1)
  1. app.py +87 -0
app.py ADDED
@@ -0,0 +1,87 @@
+ import gradio as gr
+ import json
+ from huggingface_hub import InferenceClient
+
+ client = InferenceClient(
+     "mistralai/Mistral-7B-Instruct-v0.1"
+ )
+
+ rag_text = "Este es el texto RAG"
+ prompt_template_text = "Este es el texto del template de prompt"
+
+ def format_prompt(message):
+     # Wrap the message in the Mistral-Instruct [INST] ... [/INST] format
+     prompt = "<s>"
+     prompt += f"[INST] {message} [/INST]"
+     return prompt
+
+ def generate(prompt):
+     generate_kwargs = dict(
+         temperature=0.9,
+         max_new_tokens=1024,
+         top_p=0.95,
+         repetition_penalty=1.0,
+         do_sample=True,
+         seed=42,
+     )
+
+     formatted_prompt = format_prompt(prompt)
+
+     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+     output = ""
+
+     # Yield the accumulated output after each streamed token
+     for response in stream:
+         output += response.token.text
+         yield output
+
+
+ def process_input(text, rag, prompt_template):
+     prompt = text
+     if rag:
+         prompt += rag_text
+     if prompt_template:
+         prompt += prompt_template_text
+     json_text = generate(prompt)
+
+     # generate() yields the accumulated output, so keep only the last chunk
+     json_str = ""
+     for chunk in json_text:
+         json_str = chunk
+
+     # Parse the generated string into a JSON object
+     json_obj = json.loads(json_str)
+
+     # Write the JSON file
+     with open('output.json', 'w') as f:
+         json.dump(json_obj, f)
+
+     return text
+
+ def create_interface():
+     # Define the interface components
+     input_text = gr.Textbox(label="Input")
+     rag_checkbox = gr.Checkbox(label="RAG")
+     prompt_template = gr.Checkbox(label="PromptTemplate")
+     output_text = gr.Textbox(label="Output")
+     classification_types_checkboxes = gr.CheckboxGroup(label="Clasificacion Tipo")
+
+     # Function that runs when the input is submitted
+     def fn(text, rag, prompt_template):
+         output = process_input(text, rag, prompt_template)
+         with open('output.json', 'r') as f:
+             data = json.load(f)
+         classification_types = [item['clasificacion_tipo'] for item in data]
+         classification_types_options = [(option, option) for option in classification_types]
+         classification_types_checkboxes = gr.CheckboxGroup(label="Clasificacion Tipo", choices=classification_types_options, interactive=True)
+         return output, classification_types_checkboxes
+
+     # Create the interface
+     iface = gr.Interface(
+         fn=fn,
+         inputs=[input_text, rag_checkbox, prompt_template],
+         outputs=[output_text, classification_types_checkboxes]
+     )
+
+     return iface
+
+ iface = create_interface()
+ iface.launch()
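
Note on the expected model output: fn() assumes the generated text parses as a JSON array of objects, each carrying a clasificacion_tipo key, and derives the checkbox choices from that key. A minimal sketch under that assumption, using made-up placeholder records rather than real model output:

# Minimal sketch of the output.json shape that fn() expects.
# The records below are illustrative placeholders, not real model output.
import json

example = [
    {"clasificacion_tipo": "tipo_a"},
    {"clasificacion_tipo": "tipo_b"},
]

with open('output.json', 'w') as f:
    json.dump(example, f)

with open('output.json', 'r') as f:
    data = json.load(f)

# Same derivation used in fn(): one (label, value) choice per record
choices = [(item['clasificacion_tipo'], item['clasificacion_tipo']) for item in data]
print(choices)  # [('tipo_a', 'tipo_a'), ('tipo_b', 'tipo_b')]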