rexthecoder committed
Commit 3e64e0d · 1 Parent(s): 9ad406c

chore: clear issue

Files changed (2)
  1. main.py +1 -1
  2. src/agent/tools/conversation.py +5 -4
main.py CHANGED
@@ -43,7 +43,7 @@ class LoggingDisabled:
 
 def main():
     app = Application.builder().token(
-        '6207542226:AAFqk26VQ26g1JRPvbZbYgRCXw2CRjsCxaU',).build()
+        '6207542226:AAGPOQrKiVdQJuHE0dQ1hKJm64ZXK-6z7-0',).build()
 
     run_agent(
         agent=GirlfriendGPT(
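The hunk above swaps one hard-coded Telegram bot token for another directly in source. As a hedged alternative sketch (not what this commit does), the token could be read from the environment so it never appears in the repository; TELEGRAM_BOT_TOKEN is a hypothetical variable name, and the run_agent / GirlfriendGPT wiring from main.py is omitted.

import os

from telegram.ext import Application


def main():
    # Hypothetical env var name; raises KeyError if the token is not set.
    token = os.environ["TELEGRAM_BOT_TOKEN"]
    app = Application.builder().token(token).build()
    # ... register handlers and start the bot as main.py already does.
    return app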
src/agent/tools/conversation.py CHANGED
@@ -20,14 +20,15 @@ class Conversation():
     tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
     model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
 
-    chat_history_ids = torch.tensor([], dtype=torch.long)
+
 
     async def talk(self, message: str):
         logging.info(f"{message}")
+        chat_history_ids = torch.tensor([], dtype=torch.long)
         new_user_input_ids = self.tokenizer.encode(message + self.tokenizer.eos_token, return_tensors='pt')
-        bot_input_ids = torch.cat([self.chat_history_ids, new_user_input_ids], dim=-1)
-        self.chat_history_ids = self.model.generate(bot_input_ids, max_length=1000, pad_token_id=self.tokenizer.eos_token_id)
-        return "{}".format(self.tokenizer.decode(self.chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
+        bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
+        chat_history_ids = self.model.generate(bot_input_ids, max_length=1000, pad_token_id=self.tokenizer.eos_token_id)
+        return "{}".format(self.tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
 
 
     async def process_conversation(self, update: Update, context: CallbackContext) -> int:
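This hunk turns chat_history_ids from a shared class attribute into a local variable created on every talk() call, so each reply is now generated from the current message alone and history no longer accumulates across calls. For reference, the usual multi-turn DialoGPT pattern keeps the history between turns and only decodes the newly generated tokens; the sketch below follows that pattern with the same "microsoft/DialoGPT-medium" checkpoint. The five-turn console loop is illustrative only and is not part of this repository.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")

chat_history_ids = None
for step in range(5):
    text = input(">> User: ")
    new_user_input_ids = tokenizer.encode(text + tokenizer.eos_token, return_tensors="pt")
    # Append the new user tokens to the running history (empty on the first turn).
    bot_input_ids = (
        torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
        if chat_history_ids is not None
        else new_user_input_ids
    )
    chat_history_ids = model.generate(
        bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id
    )
    # Decode only the newly generated tokens, not the whole history.
    print(tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))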