File size: 2,562 Bytes
2892c38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8a1ae64
2892c38
 
 
 
 
 
 
 
 
 
e4553fc
2892c38
 
e4553fc
2892c38
 
6a93d74
2892c38
 
6a93d74
 
2892c38
e4553fc
2892c38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
db0ead9
 
e25a60f
 
db0ead9
 
2892c38
 
 
 
db0ead9
 
2892c38
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import gradio as gr
import json
from huggingface_hub import InferenceClient

# Hugging Face Inference API client bound to the Mistral-7B instruct model.
client = InferenceClient(
    "mistralai/Mistral-7B-Instruct-v0.1"
)

# Placeholder text appended to the prompt when the "RAG" checkbox is ticked.
rag_text = "Este es el texto RAG"
# Placeholder text appended when the "PromptTemplate" checkbox is ticked.
prompt_template_text = "Este es el texto del template de prompt"

def format_prompt(message):
    """Wrap *message* in the Mistral-instruct chat format.

    Parameters
    ----------
    message : str
        Raw user prompt.

    Returns
    -------
    str
        ``<s>[INST] {message} [/INST]`` — the tag layout Mistral instruct
        models expect for a single-turn request.
    """
    # Single f-string instead of incremental concatenation; 4-space indent
    # for consistency with the rest of the file.
    return f"<s>[INST] {message} [/INST]"

def generate(prompt):
    """Run text generation for *prompt* against the module-level client.

    The prompt is first wrapped in the Mistral instruct format, then sent
    with a fixed sampling configuration (seeded, so repeatable).
    """
    # Fixed sampling configuration for every request.
    sampling_options = {
        "temperature": 0.9,
        "max_new_tokens": 1024,
        "top_p": 0.95,
        "repetition_penalty": 1.0,
        "do_sample": True,
        "seed": 42,
    }
    return client.text_generation(format_prompt(prompt), **sampling_options)


def process_input(text, rag, prompt_template):
    """Assemble the prompt, run generation, and persist the result as JSON.

    Parameters
    ----------
    text : str
        Base user text.
    rag : bool
        When true, append the module-level ``rag_text`` to the prompt.
    prompt_template : bool
        When true, append ``prompt_template_text`` to the prompt.

    Returns
    -------
    The raw model output. As a side effect the parsed output is written
    to ``output.json`` in the working directory.

    Raises
    ------
    json.JSONDecodeError
        If the model output is not valid JSON.
    """
    prompt = text
    if rag:
        prompt += rag_text
    if prompt_template:
        prompt += prompt_template_text
    output = generate(prompt)

    # ''.join also handles the case where the client streams chunks as a
    # generator; for a plain string it is a no-op.
    json_str = ''.join(output)

    # NOTE(review): assumes the model emits valid JSON — confirm the prompt
    # actually constrains the output format.
    json_obj = json.loads(json_str)

    # Explicit UTF-8 (bug fix): the platform default encoding could mangle
    # non-ASCII characters in the model output.
    with open('output.json', 'w', encoding='utf-8') as f:
        json.dump(json_obj, f)

    return output

def create_interface():
    """Build and return the Gradio interface wiring inputs to generation.

    Returns
    -------
    gr.Interface
        Interface with a text input, two option checkboxes, a text output
        and a dynamically-populated classification CheckboxGroup.
    """
    # Interface components.
    input_text = gr.Textbox(label="Input")
    rag_checkbox = gr.Checkbox(label="RAG")
    prompt_template = gr.Checkbox(label="PromptTemplate")
    output_text = gr.Textbox(label="Output")
    classification_types_checkboxes = gr.CheckboxGroup(label="Clasificacion Tipo")

    # Callback executed on submit.
    def fn(text, rag, prompt_template):
        # process_input also writes output.json as a side effect.
        output = process_input(text, rag, prompt_template)
        # Re-read the file just produced; explicit UTF-8 to match the writer.
        with open('output.json', 'r', encoding='utf-8') as f:
            data = json.load(f)
        # NOTE(review): assumes the JSON is a list of dicts each carrying a
        # 'clasificacion_tipo' key — confirm against the model output schema.
        classification_types = [item['clasificacion_tipo'] for item in data]
        classification_types_options = [(option, option) for option in classification_types]
        # Renamed from classification_types_checkboxes (bug fix: the original
        # shadowed the outer component of the same name inside this closure).
        updated_checkboxes = gr.CheckboxGroup(
            label="Clasificacion Tipo",
            choices=classification_types_options,
            interactive=True,
        )
        return output, updated_checkboxes

    examples = [
        ["Ejemplo de texto", True, False],
        ["Otro ejemplo", False, True]
    ]

    # Assemble the interface.
    iface = gr.Interface(
        fn=fn,
        inputs=[input_text, rag_checkbox, prompt_template],
        outputs=[output_text, classification_types_checkboxes],
        examples=examples
    )

    return iface

# Guard the launch so importing this module does not start the web server.
if __name__ == "__main__":
    iface = create_interface()
    iface.launch()