Upload 3 files
- UI.py +49 -35
- app.py +37 -3
- model_app.py +56 -35
UI.py
CHANGED
@@ -1,16 +1,16 @@
 # UI.py
 import gradio as gr
 
-#
+# La importación de process_and_plot ya no es necesaria aquí,
+# la función de procesamiento se pasará como argumento a create_interface.
 
-def create_interface(process_function_for_button): # <-- AÑADIDO: process_funct
+def create_interface(process_function_for_button):
 """
 Esta función crea la interfaz de usuario y la devuelve.
 Conecta el botón de submit a la 'process_function_for_button' proporcionada.
 """
 
-with gr.Blocks(theme='upsatwal/mlsc_tiet') as demo:
-# with gr.Blocks(theme=gr.themes.Soft()) as demo:
+with gr.Blocks(theme='upsatwal/mlsc_tiet') as demo: # O usa gr.themes.Soft() o tu theme preferido
 gr.Markdown("# Modelado de Bioprocesos con Ecuaciones Personalizadas y Análisis por IA")
 
 with gr.Row():
@@ -26,35 +26,39 @@ def create_interface(process_function_for_button): # <-- AÑADIDO: process_funct
 value='best'
 )
 with gr.Column(scale=1):
-gr.Markdown("### Conteo de Ecuaciones")
-biomass_eq_count_ui = gr.Number(label="
-substrate_eq_count_ui = gr.Number(label="
-product_eq_count_ui = gr.Number(label="
+gr.Markdown("### Conteo de Ecuaciones a Probar")
+biomass_eq_count_ui = gr.Number(label="Biomasa (1-3)", value=1, minimum=1, maximum=3, precision=0, step=1)
+substrate_eq_count_ui = gr.Number(label="Sustrato (1-3)", value=1, minimum=1, maximum=3, precision=0, step=1)
+product_eq_count_ui = gr.Number(label="Producto (1-3)", value=1, minimum=1, maximum=3, precision=0, step=1)
 
+# --- Sección de Biomasa ---
 with gr.Accordion("Ecuaciones y Parámetros de Biomasa", open=True):
 with gr.Row():
 with gr.Column():
-biomass_eq1_ui = gr.Textbox(label="Ecuación de Biomasa 1
-biomass_param1_ui = gr.Textbox(label="Parámetros Biomasa 1
-biomass_bound1_ui = gr.Textbox(label="Límites Biomasa 1
+biomass_eq1_ui = gr.Textbox(label="Ecuación de Biomasa 1", value="Xm * (1 - exp(-um * (t - t_lag)))", lines=2, placeholder="Ej: Xm * (1 - exp(-um * (t - t_lag)))")
+biomass_param1_ui = gr.Textbox(label="Parámetros Biomasa 1", value="Xm, um, t_lag", info="Nombres de parámetros, coma sep. Use 't' para tiempo. 'X_val' para X(t) en S/P.")
+biomass_bound1_ui = gr.Textbox(label="Límites Biomasa 1", value="(0, np.inf), (0, np.inf), (0, np.inf)", info="Formato: (low,high) para cada param. Use np.inf.")
+# Columna 2 para Biomasa (inicialmente oculta)
 biomass_col2 = gr.Column(visible=False)
 with biomass_col2:
 biomass_eq2_ui = gr.Textbox(label="Ecuación de Biomasa 2", value="X0 * exp(um * t)", lines=2)
 biomass_param2_ui = gr.Textbox(label="Parámetros Biomasa 2", value="X0, um")
-biomass_bound2_ui = gr.Textbox(label="Límites Biomasa 2", value="(0, inf), (0, inf)")
+biomass_bound2_ui = gr.Textbox(label="Límites Biomasa 2", value="(0, np.inf), (0, np.inf)")
+# Columna 3 para Biomasa (inicialmente oculta)
 biomass_col3 = gr.Column(visible=False)
 with biomass_col3:
 biomass_eq3_ui = gr.Textbox(label="Ecuación de Biomasa 3", lines=2)
 biomass_param3_ui = gr.Textbox(label="Parámetros Biomasa 3")
 biomass_bound3_ui = gr.Textbox(label="Límites Biomasa 3")
 
+# --- Sección de Sustrato ---
 with gr.Accordion("Ecuaciones y Parámetros de Sustrato", open=True):
-gr.Markdown("Para Sustrato
+gr.Markdown("Para ecuaciones de Sustrato y Producto que dependen de la biomasa X(t), usa la variable `X_val` en tu ecuación. Ejemplo: `S0 - (X_val / YXS)`.")
 with gr.Row():
 with gr.Column():
 substrate_eq1_ui = gr.Textbox(label="Ecuación de Sustrato 1", value="S0 - (X_val / YXS) - mS * t", lines=2)
 substrate_param1_ui = gr.Textbox(label="Parámetros Sustrato 1", value="S0, YXS, mS")
-substrate_bound1_ui = gr.Textbox(label="Límites Sustrato 1", value="(0, inf), (
+substrate_bound1_ui = gr.Textbox(label="Límites Sustrato 1", value="(0, np.inf), (1e-9, np.inf), (0, np.inf)") # YXS no debe ser cero
 substrate_col2 = gr.Column(visible=False)
 with substrate_col2:
 substrate_eq2_ui = gr.Textbox(label="Ecuación de Sustrato 2", lines=2)
@@ -66,12 +70,13 @@ def create_interface(process_function_for_button): # <-- AÑADIDO: process_funct
 substrate_param3_ui = gr.Textbox(label="Parámetros Sustrato 3")
 substrate_bound3_ui = gr.Textbox(label="Límites Sustrato 3")
 
+# --- Sección de Producto ---
 with gr.Accordion("Ecuaciones y Parámetros de Producto", open=True):
 with gr.Row():
 with gr.Column():
 product_eq1_ui = gr.Textbox(label="Ecuación de Producto 1", value="P0 + YPX * X_val + mP * t", lines=2)
 product_param1_ui = gr.Textbox(label="Parámetros Producto 1", value="P0, YPX, mP")
-product_bound1_ui = gr.Textbox(label="Límites Producto 1", value="(0, inf), (0, inf), (0, inf)")
+product_bound1_ui = gr.Textbox(label="Límites Producto 1", value="(0, np.inf), (0, np.inf), (0, np.inf)")
 product_col2 = gr.Column(visible=False)
 with product_col2:
 product_eq2_ui = gr.Textbox(label="Ecuación de Producto 2", lines=2)
@@ -83,22 +88,25 @@ def create_interface(process_function_for_button): # <-- AÑADIDO: process_funct
 product_param3_ui = gr.Textbox(label="Parámetros Producto 3")
 product_bound3_ui = gr.Textbox(label="Límites Producto 3")
 
-
-
+# Lógica para mostrar/ocultar campos de ecuación dinámicamente
+# La función `update_visibility` devuelve una tupla de diccionarios para Gradio
+def update_visibility_fn(count):
+return gr.Column(visible=count >= 2), gr.Column(visible=count >= 3)
 
-biomass_eq_count_ui.change(fn=
-substrate_eq_count_ui.change(fn=
-product_eq_count_ui.change(fn=
+biomass_eq_count_ui.change(fn=update_visibility_fn, inputs=biomass_eq_count_ui, outputs=[biomass_col2, biomass_col3])
+substrate_eq_count_ui.change(fn=update_visibility_fn, inputs=substrate_eq_count_ui, outputs=[substrate_col2, substrate_col3])
+product_eq_count_ui.change(fn=update_visibility_fn, inputs=product_eq_count_ui, outputs=[product_col2, product_col3])
 
 submit_button = gr.Button("Procesar y Analizar", variant="primary", scale=1)
 
 gr.Markdown("## Resultados del Análisis")
 with gr.Row():
-image_output = gr.Image(label="Gráfico Generado", type="pil", width=600, height=900, scale=2)
+image_output = gr.Image(label="Gráfico Generado", type="pil", width=600, height=900, scale=2, show_download_button=True)
 with gr.Column(scale=3):
 analysis_output = gr.Markdown(label="Análisis del Modelo por IA")
 
-
+# Lista de todos los inputs para el botón de submit
+all_inputs_for_button = [
 file_input,
 biomass_eq1_ui, biomass_eq2_ui, biomass_eq3_ui,
 biomass_param1_ui, biomass_param2_ui, biomass_param3_ui,
@@ -117,24 +125,30 @@ def create_interface(process_function_for_button): # <-- AÑADIDO: process_funct
 product_eq_count_ui
 ]
 
-
+outputs_for_button = [image_output, analysis_output]
+
+# Conexión del botón DENTRO del contexto de Blocks
 submit_button.click(
 fn=process_function_for_button, # Usa la función pasada como argumento
-inputs=
-outputs=
+inputs=all_inputs_for_button,
+outputs=outputs_for_button
 )
 
 # Inicializar la visibilidad correctamente al cargar la demo
-#
-
-
-
-
-
-
-
+# para los campos de ecuación 2 y 3.
+def initial_visibility_setup(val_b, val_s, val_p):
+return (
+update_visibility_fn(val_b)[0], update_visibility_fn(val_b)[1],
+update_visibility_fn(val_s)[0], update_visibility_fn(val_s)[1],
+update_visibility_fn(val_p)[0], update_visibility_fn(val_p)[1]
+)
+demo.load(
+fn=initial_visibility_setup,
 inputs=[biomass_eq_count_ui, substrate_eq_count_ui, product_eq_count_ui],
-outputs=[
+outputs=[
+biomass_col2, biomass_col3,
+substrate_col2, substrate_col3,
+product_col2, product_col3
+]
 )
-
 return demo
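A note on the new wiring in UI.py: the click handler fixes the contract for process_function_for_button. Gradio calls it with one positional argument per component in all_inputs_for_button (the uploaded file, the equation, parameter and bounds textboxes, and the three equation counts) and expects one return value per component in outputs_for_button (a PIL image for image_output and a Markdown string for analysis_output). Below is a minimal sketch of a callback with that shape, assuming the UI.py from this commit is importable; dummy_process_and_plot and the placeholder figure are illustrative only and are not part of the upload.

from PIL import Image

def dummy_process_and_plot(uploaded_file, *equation_fields_and_counts):
    # A real implementation would parse the equations, fit them to the data in
    # `uploaded_file`, and return the generated plot plus the AI analysis text.
    placeholder_plot = Image.new("RGB", (600, 900), color="white")
    analysis_markdown = "## Análisis\n(placeholder)"
    return placeholder_plot, analysis_markdown

if __name__ == "__main__":
    from UI import create_interface
    create_interface(process_function_for_button=dummy_process_and_plot).launch()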
app.py
CHANGED
@@ -1,11 +1,45 @@
 # app.py
 from UI import create_interface
 import interface as app_interface_module # Necesitamos la función process_and_plot
+import sys # Para manipulación de path si es necesario
+from pathlib import Path
+
+# --- Intento de Neutralizar el decorador problemático para ejecución local ---
+# Esto es un hack para evitar el error de 'No @spaces.GPU' si Gradio lo busca.
+try:
+# Intentar importar el módulo 'decorators' de tu proyecto
+# Asume que app.py está en la raíz del proyecto y decorators.py también
+current_dir = Path(__file__).parent
+if str(current_dir) not in sys.path:
+sys.path.insert(0, str(current_dir))
+import decorators
+
+class _GPUNeutralizerLocal:
+def __init__(self, *args, **kwargs): pass
+def __call__(self, func): return func
+
+# Sobrescribir si existen para que no causen error
+if hasattr(decorators, 'GPU'):
+decorators.GPU = _GPUNeutralizerLocal
+print("INFO (app.py): 'decorators.GPU' neutralizado para ejecución local.")
+if hasattr(decorators, 'gpu_decorator'):
+decorators.gpu_decorator = lambda duration=0: lambda func: func
+print("INFO (app.py): 'decorators.gpu_decorator' neutralizado para ejecución local.")
+
+except ImportError:
+print("ADVERTENCIA (app.py): Módulo 'decorators' no encontrado, no se pudo neutralizar GPU. Puede que no sea necesario.")
+except Exception as e_dec:
+print(f"ADVERTENCIA (app.py): Error al intentar neutralizar decoradores: {e_dec}")
+# --- Fin de la neutralización ---
+
 
 def main():
-# Pasa la función de procesamiento real a create_interface
-
-
+# Pasa la función de procesamiento real (process_and_plot) a create_interface
+# app_interface_module.process_and_plot es la función que queremos que el botón llame.
+demo_instance = create_interface(process_function_for_button=app_interface_module.process_and_plot)
+
+print("Lanzando interfaz Gradio localmente...")
+demo_instance.launch()
 
 if __name__ == "__main__":
 main()
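The try/except added at the top of app.py monkey-patches the project's decorators module before anything else imports it, so a GPU-requesting decorator becomes a no-op when the app runs outside Hugging Face Spaces. decorators.py itself is not part of this upload; a hypothetical version of what the hack targets (all names here are assumptions) could look like this:

# decorators.py (hypothetical; not included in this upload)
def gpu_decorator(duration=60):
    # On Spaces this would request a GPU for `duration` seconds; locally it only wraps the function.
    def wrapper(func):
        def inner(*args, **kwargs):
            return func(*args, **kwargs)
        return inner
    return wrapper

class GPU:
    # Class-based variant; app.py swaps this for a pass-through when running locally.
    def __init__(self, *args, **kwargs):
        pass
    def __call__(self, func):
        return func

Because the module object is patched in place before interface or UI are imported, any later `from decorators import gpu_decorator` picks up the neutralized no-op instead of the GPU-requesting version.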
model_app.py
CHANGED
@@ -3,7 +3,7 @@ import modal
 import sys
 from pathlib import Path
 import os
-import traceback
+import traceback
 
 # --- Configuración ---
 PYTHON_VERSION = "3.10"
@@ -32,16 +32,16 @@ app_image = (
 )
 )
 
-# --- Función Modal para LLM (sin cambios respecto a la anterior respuesta) ---
+# --- Función Modal para LLM (sin cambios respecto a la anterior respuesta completa) ---
 @stub.function(
-image=app_image,
+image=app_image, # Hereda la imagen base del stub si está definida, o usa esta.
 gpu="any",
 secrets=[modal.Secret.from_name("huggingface-read-token", optional=True)],
-timeout=600,
+timeout=600, # 10 minutos
 volumes={"/cache/huggingface": modal.Volume.persisted(f"{APP_NAME}-hf-cache-vol")}
 )
 def generate_analysis_llm_modal_remote(prompt: str, model_path_config: str, max_new_tokens_config: int) -> str:
-import torch #
+import torch # Importaciones pesadas dentro de la función Modal
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
 hf_token = os.environ.get("HUGGING_FACE_TOKEN")
@@ -50,21 +50,22 @@ def generate_analysis_llm_modal_remote(prompt: str, model_path_config: str, max_new_tokens_config: int) -> str:
 print(f"LLM Modal Func: Cargando modelo: {model_path_config} con token: {'Sí' if hf_token else 'No'}")
 
 try:
-tokenizer = AutoTokenizer.from_pretrained(model_path_config, cache_dir="/cache/huggingface/hub", token=hf_token)
+tokenizer = AutoTokenizer.from_pretrained(model_path_config, cache_dir="/cache/huggingface/hub", token=hf_token, trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained(
 model_path_config,
 torch_dtype="auto",
 device_map="auto",
 cache_dir="/cache/huggingface/hub",
 token=hf_token,
+trust_remote_code=True # Necesario para algunos modelos como Qwen
 )
 
-
-
-# Asumamos un context window conservador de 4096 si no se conoce.
-model_context_window = getattr(model.config, 'max_position_embeddings', 4096)
-max_prompt_len = model_context_window - max_new_tokens_config - 50 # 50 tokens de buffer
+model_context_window = getattr(model.config, 'max_position_embeddings', getattr(model.config, 'sliding_window', 4096)) # Para Qwen2 sliding_window
+if model_context_window is None : model_context_window = 4096 # Fallback
 
+max_prompt_len = model_context_window - max_new_tokens_config - 50 # Buffer
+if max_prompt_len <=0 : max_prompt_len = model_context_window // 2 # Si max_new_tokens es muy grande
+
 inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=max_prompt_len).to(model.device)
 
 with torch.no_grad():
@@ -90,52 +91,72 @@ def generate_analysis_llm_modal_remote(prompt: str, model_path_config: str, max_new_tokens_config: int) -> str:
 return f"Error al generar análisis con el modelo LLM: {str(e)}"
 
 # --- Servidor Gradio ---
-@stub.asgi_app()
+@stub.asgi_app(image=app_image) # Especificar la imagen para el endpoint ASGI también
 def serve_gradio_app_asgi():
-# Estas importaciones ocurren DENTRO del contenedor Modal
 import gradio as gr
-
+
+# sys.path ya está configurado por la imagen de Modal debido a .env({"PYTHONPATH": REMOTE_APP_DIR})
+# y .copy_mount(... remote_path=REMOTE_APP_DIR)
+# No obstante, una comprobación o inserción explícita no daña:
 if REMOTE_APP_DIR not in sys.path:
 sys.path.insert(0, REMOTE_APP_DIR)
+print(f"INFO (modal_app.py): Añadido {REMOTE_APP_DIR} a sys.path")
 
-
-
+# --- Intento de Neutralizar el decorador problemático ANTES de las importaciones de la app ---
+# Esto es para evitar el error "No @spaces.GPU function detected" si Gradio lo busca.
+try:
+import decorators # Intenta importar TU decorators.py
+class _GPUNeutralizerModal:
+def __init__(self, *args, **kwargs): pass
+def __call__(self, func): return func
+
+if hasattr(decorators, 'GPU'):
+decorators.GPU = _GPUNeutralizerModal
+print("INFO (modal_app.py): 'decorators.GPU' neutralizado para el entorno Modal.")
+if hasattr(decorators, 'gpu_decorator'):
+decorators.gpu_decorator = lambda duration=0: lambda func: func
+print("INFO (modal_app.py): 'decorators.gpu_decorator' neutralizado para el entorno Modal.")
+except ImportError:
+print("ADVERTENCIA (modal_app.py): Módulo 'decorators' no encontrado durante la neutralización. Esto puede ser OK.")
+except Exception as e_neut:
+print(f"ADVERTENCIA (modal_app.py): Error durante la neutralización de decoradores: {e_neut}")
+# --- Fin de la neutralización ---
+
+# Importar los módulos de la aplicación AHORA
+from UI import create_interface
+import interface as app_interface_module
 from config import MODEL_PATH as cfg_MODEL_PATH, MAX_LENGTH as cfg_MAX_LENGTH
 
-# Wrapper para llamar a la función Modal remota
-def
-print("Gradio Backend: Llamando a generate_analysis_llm_modal_remote.remote...")
+# Wrapper para llamar a la función Modal remota desde tu interface.py
+def analysis_func_wrapper_for_interface_modal(prompt: str) -> str:
+print("Gradio Backend (Modal): Llamando a generate_analysis_llm_modal_remote.remote...")
 return generate_analysis_llm_modal_remote.remote(prompt, cfg_MODEL_PATH, cfg_MAX_LENGTH)
 
-# Inyectar esta función wrapper en el módulo `interface`
-app_interface_module.generate_analysis_from_modal =
+# Inyectar esta función wrapper en el módulo `interface` que usa Gradio
+app_interface_module.generate_analysis_from_modal = analysis_func_wrapper_for_interface_modal
 app_interface_module.USE_MODAL_FOR_LLM_ANALYSIS = True
+print("INFO (modal_app.py): Runner de LLM Modal inyectado en el módulo 'interface'.")
 
-# Crear la app Gradio, pasándole la función de procesamiento
-
-gradio_ui = create_interface(process_function_for_button=app_interface_module.process_and_plot)
+# Crear la app Gradio, pasándole la función de procesamiento real
+gradio_ui_instance = create_interface(process_function_for_button=app_interface_module.process_and_plot)
 
-
+print("INFO (modal_app.py): Interfaz Gradio creada y lista para ser servida.")
+return gr.routes.App.create_app(gradio_ui_instance)
 
 @stub.local_entrypoint()
-def
-print("Probando la generación de LLM con Modal (
-# Necesitas importar config para que MODEL_PATH y MAX_LENGTH estén definidos
-# Esto debe hacerse dentro de un contexto donde los módulos de la app sean accesibles.
-# Es mejor llamar a la función stub desde aquí.
+def test_llm_local_entry(): # Renombrado para evitar conflicto con el `test_llm` de la respuesta anterior
+print("Probando la generación de LLM con Modal (local_entrypoint)...")
 if REMOTE_APP_DIR not in sys.path: # Asegurar path para pruebas locales también
 sys.path.insert(0, str(LOCAL_APP_DIR))
 from config import MODEL_PATH, MAX_LENGTH
 
 sample_prompt = "Explica brevemente el concepto de R cuadrado (R²) en el ajuste de modelos."
-# Ejecuta la función Modal directamente (no .remote() para prueba local de lógica)
-# Para probar la ejecución remota real, necesitarías `modal run modal_app.py test_llm`
-# o que test_llm llame a .remote().
-# Aquí vamos a probar la llamada remota.
 try:
+# Para ejecutar esto, necesitarías que el stub esté activo.
+# `modal run modal_app.py test_llm_local_entry`
 analysis = generate_analysis_llm_modal_remote.remote(sample_prompt, MODEL_PATH, MAX_LENGTH)
 print("\nRespuesta del LLM:")
 print(analysis)
 except Exception as e:
-print(f"Error durante
+print(f"Error durante test_llm_local_entry: {e}")
 traceback.print_exc()