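"""Async essay summarizer built on Google's PaLM text-bison-001 REST API.

Long input is split into overlapping chunks with LangChain's
RecursiveCharacterTextSplitter, each chunk is summarized concurrently
(map step), and the partial summaries are merged into one final summary
(reduce step).
"""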
import asyncio
import os

import aiohttp
import google.generativeai as palm
from langchain.text_splitter import RecursiveCharacterTextSplitter

# from poe_api_wrapper import PoeApi

# client = PoeApi("sXvCnfYy8CHnXNTRlxhmVg==")
bot = "Assistant"  # Poe bot name; only used by the commented-out Poe backend below
CHAT_CODE = ""  # Poe chat session code, filled in after the first message

PALM_API = ""  # fallback placeholder; prefer setting the PALM_API environment variable
API_KEY = os.environ.get("PALM_API", PALM_API)
palm.configure(api_key=API_KEY)


# Split long input into ~1500-character chunks with 100 characters of overlap,
# preferring paragraph, then line, then sentence boundaries.
text_splitter = RecursiveCharacterTextSplitter(
    separators=["\n\n", "\n", "."],
    chunk_size=1500,
    length_function=len,
    chunk_overlap=100,
)


map_prompt = """
Write a verbose summary, in the style of a master's student, of the following:
"{text}"
VERBOSE SUMMARY:
"""


combine_prompt = """
Write a concise summary of the following text delimited by triple backquotes.
Return your response as a detailed, verbose paragraph that covers the text. Make it as insightful to the reader as possible, and write in the style of a master's student.

```{text}```

SUMMARY:
"""


def count_tokens(text):
    # Helper for estimating prompt size with the PaLM token counter
    # (not called in the pipeline below; kept for convenience).
    return palm.count_message_tokens(prompt=text)["token_count"]


async def PalmTextModel(text, candidates=1):
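    """Call the PaLM text-bison-001 REST `generateText` endpoint.

    Returns a list of outputs when more than one candidate is requested,
    otherwise a single output string (or "" on an HTTP error).
    """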
    url = f"https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key={API_KEY}"

    headers = {
        "Content-Type": "application/json",
    }

    data = {
        "prompt": {"text": text},
        "temperature": 0.95,
        "top_k": 100,
        "top_p": 0.95,
        "candidate_count": candidates,
        "max_output_tokens": 1024,
        "stop_sequences": ["</output>"],
        "safety_settings": [
            {"category": "HARM_CATEGORY_DEROGATORY", "threshold": 4},
            {"category": "HARM_CATEGORY_TOXICITY", "threshold": 4},
            {"category": "HARM_CATEGORY_VIOLENCE", "threshold": 4},
            {"category": "HARM_CATEGORY_SEXUAL", "threshold": 4},
            {"category": "HARM_CATEGORY_MEDICAL", "threshold": 4},
            {"category": "HARM_CATEGORY_DANGEROUS", "threshold": 4},
        ],
    }

    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=data, headers=headers) as response:
            if response.status == 200:
                result = await response.json()
                # print(result)
                if candidates > 1:
                    temp = [candidate["output"] for candidate in result["candidates"]]
                    return temp
                temp = result["candidates"][0]["output"]
                return temp
            else:
                print(f"Error: {response.status}\n{await response.text()}")
                # Return an empty string instead of None so callers can still
                # join the gathered results without a TypeError.
                return ""


# Alternative backend (kept for reference): route the same call through a Poe
# bot via poe_api_wrapper instead of the PaLM REST API.
# async def PalmTextModel(message):
#     global CHAT_CODE
#     if CHAT_CODE == "":
#         for chunk in client.send_message(bot, message):
#             pass
#         CHAT_CODE = chunk["chatCode"]
#     else:
#         for chunk in client.send_message(bot, message, chatCode=CHAT_CODE):
#             pass

#     return chunk["text"]


async def Summarizer(essay):
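    """Summarize an essay with a map-reduce strategy over its chunks."""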
    docs = text_splitter.create_documents([essay])

    # The essay fits in a single chunk: no map step needed, summarize it
    # directly with the combine prompt.
    if len(docs) == 1:
        return await PalmTextModel(combine_prompt.format(text=docs[0].page_content))

    # Map step: summarize every chunk concurrently.
    tasks = [PalmTextModel(map_prompt.format(text=doc.page_content)) for doc in docs]
    responses = await asyncio.gather(*tasks)
    # Reduce step: merge the chunk summaries into one final summary.
    main = " ".join(responses)
    ans = await PalmTextModel(combine_prompt.format(text=main))
    return ans
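

# Minimal usage sketch ("essay.txt" is a hypothetical input file; adjust as needed).
if __name__ == "__main__":

    async def _demo():
        with open("essay.txt", encoding="utf-8") as f:
            essay = f.read()
        print(await Summarizer(essay))

    asyncio.run(_demo())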