uruguayai committed on
Commit
3bbb2bc
verified
1 Parent(s): ff7d42b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -9
app.py CHANGED
@@ -1,13 +1,13 @@
 
1
  from safetensors.torch import load_file
2
  import os
3
  import requests
4
 
5
- # URLs de los archivos
6
  MODEL_URL = "https://huggingface.co/datasets/uruguayai/fooocus/resolve/main/juggernautXL_v8Rundiffusion.safetensors"
7
  MODEL_PATH = "/home/user/app/juggernautXL_v8Rundiffusion.safetensors"
8
 
9
  def download_file(url, path):
10
- """Descarga un archivo desde una URL si no est谩 presente en la ruta especificada."""
11
  if not os.path.exists(path):
12
  response = requests.get(url)
13
  if response.status_code == 200:
@@ -18,15 +18,16 @@ def download_file(url, path):
18
  raise Exception(f"Failed to download {url}, status code: {response.status_code}")
19
 
20
  def load_model():
21
- # Descargar el archivo si no existe
22
  download_file(MODEL_URL, MODEL_PATH)
23
- # Cargar los pesos utilizando safetensors
24
- model = load_file(MODEL_PATH) # Carga el modelo desde el archivo safetensors
25
  return model
26
 
27
def main():
    """Script entry point: load the checkpoint and confirm on stdout."""
    # Return value is unused here; the call is kept for its load side effect.
    load_model()
    print("Model loaded successfully with safetensors!")
 
 
30
 
31
# Run the CLI entry point only when executed directly as a script.
if __name__ == "__main__":
    main()
 
 
1
+ import gradio as gr
2
  from safetensors.torch import load_file
3
  import os
4
  import requests
5
 
6
+ # URLs y rutas de archivos
7
  MODEL_URL = "https://huggingface.co/datasets/uruguayai/fooocus/resolve/main/juggernautXL_v8Rundiffusion.safetensors"
8
  MODEL_PATH = "/home/user/app/juggernautXL_v8Rundiffusion.safetensors"
9
 
10
  def download_file(url, path):
 
11
  if not os.path.exists(path):
12
  response = requests.get(url)
13
  if response.status_code == 200:
 
18
  raise Exception(f"Failed to download {url}, status code: {response.status_code}")
19
 
20
def load_model():
    """Download the checkpoint if missing and load it with safetensors.

    Returns:
        The state dict (tensor mapping) read from MODEL_PATH.
    """
    download_file(MODEL_URL, MODEL_PATH)
    # Load the tensors from the safetensors archive.
    state_dict = load_file(MODEL_PATH)
    return state_dict
24
 
25
def predict(input_text):
    """Return a readiness message that echoes the user's text.

    Simple placeholder prediction function for the Gradio demo.
    """
    message = f"Model loaded and ready: {input_text}"
    return message
28
+
29
# Text-in/text-out Gradio UI backed by the `predict` function.
iface = gr.Interface(fn=predict, inputs="text", outputs="text")

if __name__ == "__main__":
    # Make sure the model weights are downloaded/loaded before serving.
    load_model()
    # Bind on all interfaces at port 7860 (the standard HF Spaces port).
    iface.launch(server_name="0.0.0.0", server_port=7860)