Spaces: Runtime error

Commit e43ab69 · Parent: 5732a0e
chore: started

Files changed:
- main.py (+1 -1)
- src/agent/tools/conversation.py (+5 -3)
main.py CHANGED

@@ -43,7 +43,7 @@ class LoggingDisabled:
 
 def main():
     app = Application.builder().token(
-        '6207542226:
+        '6207542226:AAF1Lir2JjjFfux8KYyAzOEzzTjUIhPB4bI',).build()
 
     run_agent(
         agent=GirlfriendGPT(
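The added line hard-codes the Telegram bot token directly in main.py. A common alternative is reading it from the environment at startup; below is a minimal sketch, not this repo's code, assuming python-telegram-bot v20+ and a hypothetical TELEGRAM_BOT_TOKEN variable (neither the helper name nor the variable name comes from this commit).

# Sketch only: load the bot token from an environment variable instead of the source.
# TELEGRAM_BOT_TOKEN is a placeholder name, not taken from this commit.
import os

from telegram.ext import Application


def build_app() -> Application:
    token = os.environ["TELEGRAM_BOT_TOKEN"]  # raises KeyError (fails fast) if unset
    return Application.builder().token(token).build()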
src/agent/tools/conversation.py CHANGED

@@ -20,12 +20,14 @@ class Conversation():
     tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
     model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
 
+    chat_history_ids = torch.tensor([], dtype=torch.long)
+
     async def talk(self, message: str):
         logging.info(f"{message}")
         new_user_input_ids = self.tokenizer(message + self.tokenizer.eos_token, return_tensors='pt')
-        bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
-        chat_history_ids =self.model.generate(bot_input_ids, max_length=1000, pad_token_id=self.tokenizer.eos_token_id)
-        return "{}".format(self.tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
+        bot_input_ids = torch.cat([self.chat_history_ids, new_user_input_ids], dim=-1)
+        self.chat_history_ids =self.model.generate(bot_input_ids, max_length=1000, pad_token_id=self.tokenizer.eos_token_id)
+        return "{}".format(self.tokenizer.decode(self.chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
 
 
     async def process_conversation(self, update: Update, context: CallbackContext) -> int:
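For reference, the multi-turn pattern this change is reaching for: a minimal, standalone sketch of DialoGPT chat-history handling (not the repo's Conversation class), assuming only transformers and torch are installed. It deliberately differs from the committed code in two places: tokenizer.encode returns a plain tensor, whereas the committed self.tokenizer(...) call returns a BatchEncoding that torch.cat cannot concatenate, and the history starts as None so the first turn skips the concatenation instead of mixing an empty 1-D tensor with a 2-D input.

# Minimal DialoGPT chat-loop sketch (standalone illustration, not this repo's code).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

chat_history_ids = None  # no history before the first exchange


def talk(message: str) -> str:
    global chat_history_ids
    # Encode the user turn as a plain (1, n) tensor of token ids.
    new_user_input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors="pt")
    # Only concatenate once there is an actual history tensor.
    bot_input_ids = (
        new_user_input_ids
        if chat_history_ids is None
        else torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
    )
    chat_history_ids = model.generate(
        bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id
    )
    # Decode only the newly generated tokens, i.e. everything after the prompt.
    return tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True
    )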