from typing import Optional

from src.pipeline import pipeline


# Reset handler: returns None for each supplied component, so it adapts
# to however many inputs/outputs are wired to it.
def clear(*args):
    return tuple(None for _ in args)


def generate_text(
    country: Optional[str],
    starting_point: Optional[str],
    query_text: str,
    is_sustainable: Optional[bool] = True,
    model: Optional[str] = "Gemini-1.5-Pro",
    max_tokens: Optional[int] = 1024,
    temp: Optional[float] = 0.49,
):
    """Run the text-generation pipeline for a user query and return its response.

    Note: `country` is accepted as part of the UI signature but is not
    currently forwarded to the pipeline.
    """
    # Generation parameters forwarded to the underlying model.
    model_params = {
        'max_tokens': max_tokens,
        'temperature': temp,
    }
    pipeline_response = pipeline(
        query=query_text,
        model_name=model,
        sustainability=is_sustainable,
        starting_point=starting_point,
        **model_params,
    )
    # The pipeline signals failure by returning None.
    if pipeline_response is None:
        return "Error while generating response! Please try again."
    return pipeline_response
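

# Illustrative usage sketch (not part of the original module): the argument
# values below are hypothetical examples, and running this assumes
# src.pipeline is importable from the project root.
if __name__ == "__main__":
    response = generate_text(
        country="Italy",
        starting_point="Rome",
        query_text="Suggest a two-day itinerary focused on local food.",
        is_sustainable=True,
        model="Gemini-1.5-Pro",
        max_tokens=512,
        temp=0.3,
    )
    print(response)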