File size: 1,550 Bytes
ea99abb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import logging

from llms import LLM
from utils.remote_client import execute_remote_task

def text_summarization(text: str, model: str, summary_length: str, use_llm: bool = True) -> str:
    """
    Produce a summary of *text*.

    Dispatches to the LLM-backed summarizer when ``use_llm`` is true,
    otherwise to the traditional (Modal API) summarizer.  Whitespace-only
    input short-circuits to an empty string.
    """
    if not text.strip():
        return ""
    # Select the backend once, then invoke it with identical arguments.
    summarize = _summarization_with_llm if use_llm else _summarization_with_traditional
    return summarize(text, model, summary_length)

def _summarization_with_llm(text: str, model: str, summary_length: str) -> str:
    """
    Summarize *text* with an LLM.

    Builds a prompt requesting the given level of detail, generates a
    completion with the configured model, and returns the stripped output.
    On any failure the error is logged (with traceback) and a generic
    user-facing message is returned instead of raising, so callers always
    receive a string.
    """
    try:
        llm = LLM(model=model)
        prompt = (
            f"Summarize the following text in {summary_length} detail. "
            f"Text: {text}\nSummary:"
        )
        summary = llm.generate(prompt)
        return summary.strip()
    except Exception:
        # Log with traceback via the logging framework instead of print(),
        # so failures reach the application's log handlers, not stdout.
        logging.getLogger(__name__).exception("Error in LLM summarization")
        return "Oops! Something went wrong. Please try again later."

def _summarization_with_traditional(text: str, model: str, summary_length: str) -> str:
    """
    Summarize *text* via the remote "summarization" task (Modal API).

    Sends the text, model name, and requested summary length as the task
    payload.  Returns the remote ``"summary"`` field (empty string if
    absent).  Any transport error or remote-reported error is logged and
    mapped to a generic user-facing message, so callers always receive a
    string.
    """
    # Building the payload cannot raise; keep the try body minimal.
    payload = {
        "text": text,
        "model": model,
        "summary_length": summary_length,
    }
    try:
        resp = execute_remote_task("summarization", payload)
    except Exception:
        # Log with traceback instead of print() so the failure is captured
        # by the application's logging configuration.
        logging.getLogger(__name__).exception("Error in traditional summarization")
        return "Oops! Something went wrong. Please try again later."
    if "error" in resp:
        # Previously the remote error detail was silently discarded; log it
        # before returning the generic message shown to the user.
        logging.getLogger(__name__).error(
            "Remote summarization reported an error: %s", resp["error"]
        )
        return "Oops! Something went wrong. Please try again later."
    return resp.get("summary", "")