from transformers import AutoTokenizer
import transformers
import torch


class Model:
    """Thin wrapper around a Hugging Face text-generation pipeline (Falcon-7B-Instruct by default)."""

    def __init__(self, model="tiiuae/falcon-7b-instruct") -> None:
        # Load the tokenizer that matches the chosen checkpoint.
        self.tokenizer = AutoTokenizer.from_pretrained(model)
        # Build the text-generation pipeline; device_map="auto" places the weights
        # on the available GPUs/CPU, and bfloat16 keeps memory usage down.
        self.pipeline = transformers.pipeline(
            "text-generation",
            model=model,
            tokenizer=self.tokenizer,
            torch_dtype=torch.bfloat16,
            trust_remote_code=True,
            device_map="auto",
        )

    def gen(self, prompt, temp=0.0, max_length=200):
        # temperature must be strictly positive when sampling, so fall back to
        # greedy decoding when temp is 0.
        do_sample = temp > 0
        gen_kwargs = {
            "max_length": max_length,
            "do_sample": do_sample,
            "num_return_sequences": 1,
            "eos_token_id": self.tokenizer.eos_token_id,
        }
        if do_sample:
            gen_kwargs["temperature"] = temp
        # Generate from the caller's prompt rather than a hard-coded example.
        sequences = self.pipeline(prompt, **gen_kwargs)
        
        return '\n'.join([seq['generated_text'] for seq in sequences])
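

# A minimal usage sketch (an assumption, not part of the original file): it presumes
# enough GPU/CPU memory to load Falcon-7B-Instruct, and the prompt is illustrative.
if __name__ == "__main__":
    model = Model()
    # Deterministic (greedy) generation with the default temp=0.0.
    print(model.gen("Write a short poem about giraffes.", max_length=100))
    # Sampled generation with a higher temperature for more varied output.
    print(model.gen("Write a short poem about giraffes.", temp=0.8))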