# gaia/utils/summary.py
import os
import pandas as pd
from datetime import datetime
from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from prompts.summary_prompt import metrological_data_summary_prompt

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()

def get_summary(scenario: str, temperature_df: pd.DataFrame, rain_df: pd.DataFrame, irradiance_df: pd.DataFrame) -> str:
    """Summarize temperature, precipitation and solar-irradiance forecasts with an LLM."""
    today = datetime.today().strftime("%Y/%m/%d")

    # Serialize each DataFrame in full so the LLM receives every row as plain text.
    temp_data = temperature_df.to_string(index=False)
    rain_data = rain_df.to_string(index=False)
    irradiance_data = irradiance_df.to_string(index=False)
    llm = ChatOpenAI(
        model="gpt-4o",
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=2,
        api_key=os.environ.get("OPENAI_API_KEY"),
    )
    output_parser = StrOutputParser()
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", metrological_data_summary_prompt),
            # Human message (French): "I want a summary of these weather forecasts:
            # the temperature data {temp_data}, the precipitation data {rain_data},
            # the solar irradiance data {irradiance_data}".
            ("human", "Je veux un résumé de ces prévisions météorologiques : les données de température {temp_data}, les données de précipitations {rain_data}, les données d'irradiance solaire {irradiance_data}"),
        ]
    )
    chain = prompt | llm | output_parser

    response = chain.invoke({
        "scenario": scenario,
        "today": today,
        "temp_data": temp_data,
        "rain_data": rain_data,
        "irradiance_data": irradiance_data,
    })
    # StrOutputParser already returns a plain string, so no further parsing is needed.
    return response
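

# --- Usage sketch (not part of the original module) --------------------------
# A minimal, hypothetical example of calling get_summary with small in-memory
# DataFrames. The column names ("date", "temperature_c", "rain_mm",
# "irradiance_wm2") and the "baseline" scenario string are illustrative
# assumptions, not the schema used elsewhere in the project. Running it
# requires a valid OPENAI_API_KEY in the .env file.
if __name__ == "__main__":
    sample_temperature = pd.DataFrame(
        {"date": ["2024/06/01", "2024/06/02"], "temperature_c": [21.5, 24.0]}
    )
    sample_rain = pd.DataFrame(
        {"date": ["2024/06/01", "2024/06/02"], "rain_mm": [0.0, 3.2]}
    )
    sample_irradiance = pd.DataFrame(
        {"date": ["2024/06/01", "2024/06/02"], "irradiance_wm2": [540, 610]}
    )
    print(
        get_summary(
            scenario="baseline",
            temperature_df=sample_temperature,
            rain_df=sample_rain,
            irradiance_df=sample_irradiance,
        )
    )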