"""Map-reduce text summarization with the PaLM ``text-bison-001`` model.

The input essay is split into overlapping chunks; each chunk is
summarized concurrently (map step), then the joined chunk summaries are
summarized once more (reduce step).
"""

import asyncio
import os
import pprint  # noqa: F401  (kept: part of the file's original imports)

import aiohttp
import google.generativeai as palm
from langchain import PromptTemplate  # noqa: F401  (kept: part of the file's original imports)
from langchain.text_splitter import RecursiveCharacterTextSplitter

bot = "Assistant"
CHAT_CODE = ""
PALM_API = ""  # in-code fallback key; prefer setting the PALM_API environment variable
API_KEY = os.environ.get("PALM_API", PALM_API)

palm.configure(api_key=API_KEY)

# Chunking tuned for prose: prefer paragraph, then line, then sentence breaks.
text_splitter = RecursiveCharacterTextSplitter(
    separators=["\n\n", "\n", "."],
    chunk_size=1500,
    length_function=len,
    chunk_overlap=100,
)

map_prompt = """
Write a verbose summary like a masters student of the following:
"{text}"
CONCISE SUMMARY:
"""

combine_prompt = """
Write a concise summary of the following text delimited by triple backquotes.
Return your response in a detailed verbose paragraph which covers the text.
Make it as insightful to the reader as possible, write like a masters student.
```{text}```
SUMMARY:
"""


def count_tokens(text):
    """Return the PaLM token count for *text*."""
    return palm.count_message_tokens(prompt=text)["token_count"]


async def PalmTextModel(text, candidates=1):
    """POST *text* to the PaLM generateText REST endpoint.

    Args:
        text: The prompt to send to the model.
        candidates: Number of completions to request.

    Returns:
        The single output string when ``candidates == 1``, a list of
        output strings when ``candidates > 1``, or ``None`` when the API
        answers with a non-200 status (the error is printed, not raised).
    """
    url = (
        "https://generativelanguage.googleapis.com/v1beta2/models/"
        f"text-bison-001:generateText?key={API_KEY}"
    )
    headers = {"Content-Type": "application/json"}
    data = {
        "prompt": {"text": text},
        "temperature": 0.95,
        "top_k": 100,
        "top_p": 0.95,
        "candidate_count": candidates,
        "max_output_tokens": 1024,
        # NOTE(review): an empty stop sequence looks suspicious — confirm
        # the API accepts it; kept as-is to preserve behavior.
        "stop_sequences": [""],
        # Threshold 4 = block none: rely on downstream handling instead.
        "safety_settings": [
            {"category": "HARM_CATEGORY_DEROGATORY", "threshold": 4},
            {"category": "HARM_CATEGORY_TOXICITY", "threshold": 4},
            {"category": "HARM_CATEGORY_VIOLENCE", "threshold": 4},
            {"category": "HARM_CATEGORY_SEXUAL", "threshold": 4},
            {"category": "HARM_CATEGORY_MEDICAL", "threshold": 4},
            {"category": "HARM_CATEGORY_DANGEROUS", "threshold": 4},
        ],
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=data, headers=headers) as response:
            if response.status == 200:
                result = await response.json()
                outputs = [c["output"] for c in result["candidates"]]
                return outputs if candidates > 1 else outputs[0]
            # Best-effort error reporting; callers must handle None.
            print(f"Error: {response.status}\n{await response.text()}")
            return None


async def Summarizer(essay):
    """Summarize *essay* via a map-reduce over PaLM calls.

    Args:
        essay: The full text to summarize.

    Returns:
        The final summary string (or ``None`` if the model call failed).
    """
    docs = text_splitter.create_documents([essay])

    # Single chunk: one combine pass is sufficient — no map step needed.
    if len(docs) == 1:
        return await PalmTextModel(combine_prompt.format(text=docs[0].page_content))

    # Map: summarize every chunk concurrently.
    tasks = [PalmTextModel(map_prompt.format(text=doc.page_content)) for doc in docs]
    responses = await asyncio.gather(*tasks)

    # Reduce: summarize the joined chunk summaries into the final answer.
    merged = " ".join(responses)
    return await PalmTextModel(combine_prompt.format(text=merged))