from transformers import LongT5ForConditionalGeneration, AutoTokenizer
import time

N = 2 # Number of previous QA pairs to use for context
MAX_NEW_TOKENS = 128 # Maximum number of tokens for each answer

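# Load the fine-tuned conversational QA model and its tokenizer from the Hugging Face Hub.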
tokenizer = AutoTokenizer.from_pretrained("tryolabs/long-t5-tglobal-base-blogpost-cqa")
model = LongT5ForConditionalGeneration.from_pretrained("tryolabs/long-t5-tglobal-base-blogpost-cqa")

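# Read the document the model will answer questions about.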
with open("context_short.txt", "r") as f:
    context = f.read()

def build_input(question, user_history=None, bot_history=None):
    """Build the model input: the document context, up to N previous QA pairs, and the current question."""
    user_history = user_history or []
    bot_history = bot_history or []
    model_input = f"{context} || "
    # Include at most N of the most recent previous QA pairs, oldest first.
    previous = min(len(bot_history), N)
    for i in range(previous, 0, -1):
        prev_question = user_history[-i]
        prev_answer = bot_history[-i]
        model_input += f"<Q{i}> {prev_question} <A{i}> {prev_answer} "
    model_input += f"<Q> {question} <A> "
    return model_input

def get_model_answer(question, user_history=None, bot_history=None):
    """Answer a question about the context, timing each stage and updating the conversation history."""
    user_history = user_history or []
    bot_history = bot_history or []

    start = time.perf_counter()
    model_input = build_input(question, user_history, bot_history)
    end = time.perf_counter()
    print(f"Build input: {end - start:.4f}s")

    start = time.perf_counter()
    encoded_inputs = tokenizer(model_input, max_length=3000, truncation=True, return_tensors="pt")
    input_ids, attention_mask = (
        encoded_inputs.input_ids,
        encoded_inputs.attention_mask,
    )
    end = time.perf_counter()
    print(f"Tokenize: {end - start:.4f}s")

    start = time.perf_counter()
    encoded_output = model.generate(
        input_ids=input_ids,
        attention_mask=attention_mask,
        do_sample=True,
        max_new_tokens=MAX_NEW_TOKENS,
    )
    answer = tokenizer.decode(encoded_output[0], skip_special_tokens=True)
    end = time.perf_counter()
    print(f"Generate: {end - start:.4f}s")

    # Record the new QA pair so it can be used as context for follow-up questions.
    user_history.append(question)
    bot_history.append(answer)
    return answer, user_history, bot_history
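

# --- Usage sketch (not part of the original script) ---
# A minimal example of driving the functions above in a multi-turn loop.
# It assumes "context_short.txt" exists next to this script; the questions
# below are placeholders, not taken from the original demo.
if __name__ == "__main__":
    user_history, bot_history = [], []
    for question in [
        "What is the document about?",
        "Can you give more details?",
    ]:
        answer, user_history, bot_history = get_model_answer(
            question, user_history, bot_history
        )
        print(f"Q: {question}\nA: {answer}\n")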