import os
import pandas as pd
from datetime import datetime
from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from prompts.summary_prompt import metrological_data_summary_prompt

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()


def get_summary(scenario: str, temperature_df: pd.DataFrame, rain_df: pd.DataFrame, irradiance_df: pd.DataFrame) -> str:
    """Generate a natural-language summary of the weather forecast data for the given scenario."""
    today = datetime.today().strftime("%Y/%m/%d")

    # Serialize the full DataFrames to plain text so they can be embedded in the prompt.
    temp_data = temperature_df.to_string(index=False)
    rain_data = rain_df.to_string(index=False)
    irradiance_data = irradiance_df.to_string(index=False)

    # Deterministic model configuration; retries guard against transient API errors.
    llm = ChatOpenAI(
        model="gpt-4o",
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=2,
        api_key=os.environ.get("OPENAI_API_KEY"),
    )
    output_parser = StrOutputParser()
    # The system prompt expects {scenario} and {today}; the human message carries the raw data tables.
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", metrological_data_summary_prompt),
            ("human", "Je veux un résumé de ces prévisions météorologiques : les données de température {temp_data}, les données de précipitations {rain_data}, les données d'irradiance solaire {irradiance_data}"),
        ]
    )
    # Pipe the prompt into the model and parse the raw message into a plain string.
    chain = prompt | llm | output_parser

    response = chain.invoke({
        "scenario": scenario,
        "today": today,
        "temp_data": temp_data,
        "rain_data": rain_data,
        "irradiance_data": irradiance_data,
    })

    # The chain already returns a parsed string, so no further parsing is needed.
    return response
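

# Minimal usage sketch. The DataFrame column names ("date", "temperature_c", "rain_mm",
# "irradiance_wm2") and the scenario label are hypothetical examples; the real schemas
# depend on the upstream data source feeding this function.
if __name__ == "__main__":
    sample_dates = pd.date_range("2024-07-01", periods=3, freq="D").strftime("%Y/%m/%d")
    temperature_df = pd.DataFrame({"date": sample_dates, "temperature_c": [21.4, 23.1, 19.8]})
    rain_df = pd.DataFrame({"date": sample_dates, "rain_mm": [0.0, 2.5, 7.1]})
    irradiance_df = pd.DataFrame({"date": sample_dates, "irradiance_wm2": [310, 280, 150]})

    print(get_summary("irrigation planning", temperature_df, rain_df, irradiance_df))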