rexthecoder committed
Commit d86afc2 · 1 Parent(s): 3576d20

chore: fix

Files changed (2)
  1. src/agent/tools/conversation.py +32 -20
  2. src/api.py +6 -9
src/agent/tools/conversation.py CHANGED
@@ -30,27 +30,39 @@ class Conversation():
        # bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1)
        # chat_history_ids =self.model.generate(bot_input_ids, max_length=1000, pad_token_id=self.tokenizer.eos_token_id)
        # return "{}".format(self.tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
-
-    def generate(self, instruction, knowledge, dialog):
-        if knowledge != '':
-            knowledge = '[KNOWLEDGE] ' + knowledge
-        dialog = ' EOS '.join(dialog)
-        query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
-        input_ids = self.tokenizer(f"{query}", return_tensors="pt").input_ids
-        outputs = self.model.generate(
-            input_ids, max_length=128,
-            min_length=8,
-            top_p=0.9,
-            do_sample=True,
-        )
-        output = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
-        return output
+    def predict(self, input, history=[]):
+        instruction = "Instruction: given a dialog context and related knowledge, you need to answer the question based on the knowledge."
+        knowledge = '[KNOWLEDGE] '
+        s = list(sum(history, ()))
+        s.append(input)
+        dialog = ' EOS '.join(s)
+        query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
+        input_ids = self.tokenizer.encode(f"{query}", return_tensors='pt')
+        print(input, s)
+        output = self.model.generate(input_ids, max_length=128, min_length=8, top_p=0.9, do_sample=True).tolist()
+        response = self.tokenizer.decode(output[0], skip_special_tokens=True)
+        history.append((input, response))
+        return response
+    # def generate(self, instruction, knowledge, dialog):
+    #     if knowledge != '':
+    #         knowledge = '[KNOWLEDGE] ' + knowledge
+    #     dialog = ' EOS '.join(dialog)
+    #     query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
+    #     input_ids = self.tokenizer(f"{query}", return_tensors="pt").input_ids
+    #     outputs = self.model.generate(
+    #         input_ids, max_length=128,
+    #         min_length=8,
+    #         top_p=0.9,
+    #         do_sample=True,
+    #     )
+    #     output = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
+    #     return output

    async def process_conversation(self, update: Update, context: CallbackContext) -> int:
        message = update.message.text
-        instruction = f'Instruction: given a dialog context, you need to response empathically.'
-        knowledge = ''
-        dialog = []
-        dialog .append(message)
-        text = self.generate(instruction, knowledge, dialog)
+        # instruction = f'Instruction: given a dialog context, you need to response empathically.'
+        # knowledge = ''
+        # dialog = []
+        # dialog .append(message)
+        text = self.predict(message)
        await update.message.reply_text(f'{text}')
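For context, a minimal standalone sketch of how the new predict method could be exercised outside the Telegram bot, assuming self.tokenizer and self.model wrap a GODEL-style seq2seq checkpoint; the ConversationSketch class and the microsoft/GODEL-v1_1-base-seq2seq checkpoint name below are assumptions for illustration, not part of this commit:

# Standalone sketch (assumed setup; not part of the commit).
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

class ConversationSketch:
    def __init__(self, checkpoint="microsoft/GODEL-v1_1-base-seq2seq"):  # assumed checkpoint name
        self.tokenizer = AutoTokenizer.from_pretrained(checkpoint)
        self.model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

    def predict(self, input, history=[]):
        # Mirrors the committed method: builds a GODEL-style
        # "instruction [CONTEXT] dialog [KNOWLEDGE]" prompt and samples a reply.
        instruction = ("Instruction: given a dialog context and related knowledge, "
                       "you need to answer the question based on the knowledge.")
        knowledge = '[KNOWLEDGE] '
        s = list(sum(history, ()))   # flatten [(user, bot), ...] pairs into a flat list of turns
        s.append(input)
        dialog = ' EOS '.join(s)
        query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
        input_ids = self.tokenizer.encode(query, return_tensors='pt')
        output = self.model.generate(input_ids, max_length=128, min_length=8,
                                     top_p=0.9, do_sample=True).tolist()
        response = self.tokenizer.decode(output[0], skip_special_tokens=True)
        history.append((input, response))
        return response

bot = ConversationSketch()
print(bot.predict("Hi there, how are you today?"))

Worth noting about the committed version: the mutable default history=[] is evaluated once and mutated on every call that omits history, so turns accumulate on the method itself, and process_conversation leans on that by calling self.predict(message) with no explicit history.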
src/api.py CHANGED
@@ -4,7 +4,7 @@ from typing import List, Optional, Type
from pydantic import Field


-from agent.base import LangChainAgentBot
+from agent.base import LangChainAgentBot
from telegram.ext import Updater, CommandHandler, CallbackContext, Application, ContextTypes
from telegram import Update
from telegram.ext import (
@@ -20,14 +20,10 @@ VERBOSE = True


async def hello(update: Update, context: CallbackContext) -> None:
-    intro_text = f"""
-    🤖 Welcome to BearBuddy, created by rexthecoder! I'm your extraordinary AI companion capable of accomplishing the impossible!
-    💬 Feel free to ask me about anything, whether it's mouthwatering 🍔 recipes, exciting ✈️ travel destinations, effective 🏋️‍♀️ fitness routines, strategic 📱 marketing tips, or any other topic you can think of.
-    Don't worry about the language barrier—I'm here to assist you in any language!
-    How can I assist you today?
-    """
+    intro_text = "🤖 Welcome to BearBuddy, crafted by rexthecoder! I'm your extraordinary AI companion capable of accomplishing the impossible!\n\n💬 Feel free to ask me about anything, whether it's mouthwatering 🍔 recipes, exciting ✈️ travel destinations, effective 🏋️‍♀️ fitness routines, strategic 📱 marketing tips, or any other topic you can think of.\n\nDon't worry about the language barrier—I'm here to assist you in any language!\nHow can I assist you today?"
    await update.message.reply_text(intro_text)

+
class GirlfriendGPT(LangChainAgentBot, ):
    """Deploy LangChain chatbots and connect them to Telegram."""

@@ -41,10 +37,11 @@ class GirlfriendGPT(LangChainAgentBot, ):
        # Run the bot until the user presses Ctrl-C
        # self.application.run_polling()
        self.token = token
-
+
    def handlers(self):
        summary_handler = self.conversation_summary_handler()
-        self.application.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, self.process_conversation))
+        self.application.add_handler(MessageHandler(
+            filters.TEXT & ~filters.COMMAND, self.process_conversation))
        self.application.add_handler(summary_handler)
        self.application.add_handler(CommandHandler('start', hello))

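For reference, a minimal sketch of the handler wiring that handlers() performs, written against the python-telegram-bot v20+ Application API; the placeholder token, start, and on_text names below are stand-ins for illustration and do not come from this repository:

# Standalone sketch of the equivalent handler registration (assumed names).
from telegram import Update
from telegram.ext import (
    Application, CommandHandler, ContextTypes, MessageHandler, filters,
)

async def start(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    await update.message.reply_text("Hello!")

async def on_text(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    # Echo stand-in for process_conversation.
    await update.message.reply_text(update.message.text)

def main() -> None:
    application = Application.builder().token("YOUR-TELEGRAM-TOKEN").build()  # placeholder token
    # Same order as in handlers(): free-text messages first, then commands.
    application.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, on_text))
    application.add_handler(CommandHandler("start", start))
    application.run_polling()

if __name__ == "__main__":
    main()

Because the text filter excludes commands via ~filters.COMMAND, registering the MessageHandler first does not intercept /start; the CommandHandler still receives it.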