Inicio
Browse files- .gitignore +3 -0
- Dockerfile +13 -0
- IA.py +21 -0
- asistente.py +5 -0
- main.py +29 -0
- operacionesIA.py +49 -0
- requirements.txt +3 -0
.gitignore
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
/venv/
|
2 |
+
bridges.py
|
3 |
+
/__pycache__/
|
Dockerfile
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Container image for the nutrition-assistant FastAPI service.
# Runs as a non-root user and serves on port 7860 (Hugging Face Spaces convention).
FROM python:3.9

# Create an unprivileged user and run everything as that user.
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app
# BUG FIX: the FastAPI instance is `app` in main.py (there is no app.py),
# so the module path must be "main:app", not "app:app".
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
IA.py
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import bridges
|
2 |
+
from openai import OpenAI
|
3 |
+
import operacionesIA
|
4 |
+
import asistente
|
5 |
+
|
6 |
+
# Module-level OpenAI client built from the key in bridges.py.
# NOTE(review): this client appears unused in this module — every function
# below delegates to operacionesIA, which constructs its own client.
client = OpenAI(api_key=bridges.buzz)
|
7 |
+
|
8 |
+
def getMacronutrientes(prompt):
    """Query the macronutrient-distribution assistant with *prompt* and return its reply text."""
    assistant_id = asistente.macronutrientes
    return operacionesIA.consulta(assistant_id, prompt)
|
10 |
+
|
11 |
+
def getComidas(prompt):
    """Query the meal-distribution assistant with *prompt* and return its reply text."""
    assistant_id = asistente.comidas
    return operacionesIA.consulta(assistant_id, prompt)
|
13 |
+
|
14 |
+
def getAlimentos(prompt):
    """Query the food-list assistant with *prompt* and return its reply text."""
    assistant_id = asistente.alimentos
    return operacionesIA.consulta(assistant_id, prompt)
|
16 |
+
|
17 |
+
def getReceta(prompt):
    """Query the recipe assistant with *prompt* and return its reply text."""
    assistant_id = asistente.receta
    return operacionesIA.consulta(assistant_id, prompt)
|
19 |
+
|
20 |
+
def getSustitucion(prompt):
    """Query the food-substitution assistant with *prompt* and return its reply text."""
    assistant_id = asistente.sustitucion
    return operacionesIA.consulta(assistant_id, prompt)
|
asistente.py
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# OpenAI Assistant IDs used by the endpoints in main.py (via IA.py).
# NOTE(review): IDs are hard-coded; consider loading them from config/env.
macronutrientes = "asst_9JBQUaVq1Z2hsEhWIub9skHl"
comidas = "asst_EJIfpgocC28VHCw2B6V6MduR"
alimentos = "asst_CjpwbK3oURVXkrkCz00TxTRv"
receta = "asst_b48NBLlvk7cXUdp8DcLxAvsM"
sustitucion = "asst_YsHcJZ5IBpRkVM4M6Wmf5d2w"
|
main.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fastapi import FastAPI
|
2 |
+
import IA
|
3 |
+
|
4 |
+
# ASGI application instance served by uvicorn (see Dockerfile CMD).
app = FastAPI()
|
5 |
+
|
6 |
+
# 1. Macronutrient distribution.
@app.post("/macronutrientes/")
async def macronutrientes(prompt: str):
    """Return the assistant's macronutrient-distribution answer for *prompt*."""
    return IA.getMacronutrientes(prompt)
|
10 |
+
|
11 |
+
# 2. Meal distribution.
@app.post("/comidas/")
async def comidas(prompt: str):
    """Return the assistant's meal-distribution answer for *prompt*."""
    return IA.getComidas(prompt)
|
15 |
+
|
16 |
+
# 3. Food list.
@app.post("/lista_alimentos/")
async def lista_alimentos(prompt: str):
    """Return the assistant's food-list answer for *prompt*."""
    return IA.getAlimentos(prompt)
|
20 |
+
|
21 |
+
# 4. Recipe.
@app.post("/receta/")
async def receta(prompt: str):
    """Return the assistant's recipe answer for *prompt*."""
    return IA.getReceta(prompt)
|
25 |
+
|
26 |
+
# 5. Substitute a food item.
@app.post("/sustituir_alimento/")
async def sustituir_alimento(prompt: str):
    """Return the assistant's food-substitution answer for *prompt*."""
    return IA.getSustitucion(prompt)
|
operacionesIA.py
ADDED
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import bridges
|
2 |
+
from openai import OpenAI
|
3 |
+
import time
|
4 |
+
import asistente
|
5 |
+
|
6 |
+
# OpenAI client used by all Assistants API calls below; the API key comes
# from the git-ignored bridges.py module.
client = OpenAI(api_key=bridges.buzz)
|
7 |
+
|
8 |
+
def consulta(asistente, prompt):
    """Send *prompt* to the assistant whose ID is *asistente* and return the reply text.

    NOTE(review): the parameter name shadows the imported `asistente` module;
    kept unchanged for caller compatibility.
    """
    return ejecutaLlamado(preparaPregunta(prompt), asistente)
|
13 |
+
|
14 |
+
#Subfunciones de consulta.
|
15 |
+
def preparaPregunta(prompt):
    """Create a fresh conversation thread seeded with the user's *prompt*.

    Returns the new thread object; the caller passes it to ejecutaLlamado.
    """
    thread = client.beta.threads.create()
    # The created message object was previously bound to an unused local
    # (`message`); the return value is not needed, so the call stands alone.
    client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=prompt,
    )
    return thread
|
24 |
+
|
25 |
+
def ejecutaLlamado(thread, asistente):
    """Run assistant *asistente* on *thread*, wait for completion, and return the reply.

    Polls the run every 2 seconds until it reaches a terminal state, then
    returns the text of the first assistant message found, or a fallback
    string if the run produced no assistant reply (e.g. failed/cancelled).
    """
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=asistente
    )

    # Wait for the run to reach a terminal state.
    # BUG FIX: "expired" added — runs that time out server-side end in the
    # "expired" status, which previously left this loop polling forever.
    while True:
        run_status = client.beta.threads.runs.retrieve(
            thread_id=thread.id,
            run_id=run.id
        )
        if run_status.status in ["completed", "failed", "cancelled", "expired"]:
            break
        time.sleep(2)  # wait before polling again

    # Fetch the assistant's reply. messages.list returns newest-first, so the
    # first assistant-role message is the answer to this run.
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    for msg in messages.data:
        if msg.role == "assistant":
            return msg.content[0].text.value

    # Mojibake in the original literal ("recibi贸") restored to proper UTF-8.
    return "No se recibió respuesta del asistente"
|
49 |
+
|
requirements.txt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
fastapi
|
2 |
+
fastapi[standard]
|
3 |
+
openai
|