Mbonea committed on
Commit 89ebcd8 · 1 Parent(s): 647122b

Summarize this

Files changed (1)
  1. App/Chat/utils/Summarize.py +50 -50
App/Chat/utils/Summarize.py CHANGED
@@ -45,56 +45,56 @@ def count_tokens(text):
     return palm.count_message_tokens(prompt=text)["token_count"]


-# async def PalmTextModel(text, candidates=1):
-#     url = f"https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key={API_KEY}"
-
-#     headers = {
-#         "Content-Type": "application/json",
-#     }
-
-#     data = {
-#         "prompt": {"text": text},
-#         "temperature": 0.95,
-#         "top_k": 100,
-#         "top_p": 0.95,
-#         "candidate_count": candidates,
-#         "max_output_tokens": 1024,
-#         "stop_sequences": ["</output>"],
-#         "safety_settings": [
-#             {"category": "HARM_CATEGORY_DEROGATORY", "threshold": 4},
-#             {"category": "HARM_CATEGORY_TOXICITY", "threshold": 4},
-#             {"category": "HARM_CATEGORY_VIOLENCE", "threshold": 4},
-#             {"category": "HARM_CATEGORY_SEXUAL", "threshold": 4},
-#             {"category": "HARM_CATEGORY_MEDICAL", "threshold": 4},
-#             {"category": "HARM_CATEGORY_DANGEROUS", "threshold": 4},
-#         ],
-#     }
-
-#     async with aiohttp.ClientSession() as session:
-#         async with session.post(url, json=data, headers=headers) as response:
-#             if response.status == 200:
-#                 result = await response.json()
-#                 # print(result)
-#                 if candidates > 1:
-#                     temp = [candidate["output"] for candidate in result["candidates"]]
-#                     return temp
-#                 temp = result["candidates"][0]["output"]
-#                 return temp
-#             else:
-#                 print(f"Error: {response.status}\n{await response.text()}")
-
-
-async def PalmTextModel(message):
-    global CHAT_CODE
-    if CHAT_CODE == "":
-        for chunk in client.send_message(bot, message):
-            pass
-        CHAT_CODE = chunk["chatCode"]
-    else:
-        for chunk in client.send_message(bot, message, chatCode=CHAT_CODE):
-            pass
-
-    return chunk["text"]
+async def PalmTextModel(text, candidates=1):
+    url = f"https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key={API_KEY}"
+
+    headers = {
+        "Content-Type": "application/json",
+    }
+
+    data = {
+        "prompt": {"text": text},
+        "temperature": 0.95,
+        "top_k": 100,
+        "top_p": 0.95,
+        "candidate_count": candidates,
+        "max_output_tokens": 1024,
+        "stop_sequences": ["</output>"],
+        "safety_settings": [
+            {"category": "HARM_CATEGORY_DEROGATORY", "threshold": 4},
+            {"category": "HARM_CATEGORY_TOXICITY", "threshold": 4},
+            {"category": "HARM_CATEGORY_VIOLENCE", "threshold": 4},
+            {"category": "HARM_CATEGORY_SEXUAL", "threshold": 4},
+            {"category": "HARM_CATEGORY_MEDICAL", "threshold": 4},
+            {"category": "HARM_CATEGORY_DANGEROUS", "threshold": 4},
+        ],
+    }
+
+    async with aiohttp.ClientSession() as session:
+        async with session.post(url, json=data, headers=headers) as response:
+            if response.status == 200:
+                result = await response.json()
+                # print(result)
+                if candidates > 1:
+                    temp = [candidate["output"] for candidate in result["candidates"]]
+                    return temp
+                temp = result["candidates"][0]["output"]
+                return temp
+            else:
+                print(f"Error: {response.status}\n{await response.text()}")
+
+
+# async def PalmTextModel(message):
+#     global CHAT_CODE
+#     if CHAT_CODE == "":
+#         for chunk in client.send_message(bot, message):
+#             pass
+#         CHAT_CODE = chunk["chatCode"]
+#     else:
+#         for chunk in client.send_message(bot, message, chatCode=CHAT_CODE):
+#             pass
+
+#     return chunk["text"]


 async def Summarizer(essay):
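
For context, a minimal usage sketch of the reinstated REST-based PalmTextModel. This is not part of the commit; the import path and prompt string are hypothetical, and it assumes API_KEY and aiohttp are already configured at the top of Summarize.py and that the call is made from an async entry point.

import asyncio

# Hypothetical import path, based on the file's location in this repo.
from App.Chat.utils.Summarize import PalmTextModel

async def main():
    # With the default candidates=1 the function returns a single string;
    # candidates > 1 would return a list of outputs instead.
    summary = await PalmTextModel("Summarize this essay: <essay text>")
    print(summary)

asyncio.run(main())

Note that on a non-200 response the function only prints the error and implicitly returns None, so callers such as Summarizer may want to guard against a None result.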