import gradio as gr
import torch
from transformers import pipeline

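# Dad-joke model from the Hugging Face Hub (a Mistral-7B fine-tune, going by the model name).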
model_path = "shuttie/mistral-7b-dadjokes-v2"

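# Build a text-generation pipeline; device_map="auto" places the model on a GPU
# when one is available (this relies on the accelerate package being installed).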
generator = pipeline(
    task="text-generation",
    model=model_path,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

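# Alpaca-style prompt template: the user's message is inserted into the Input
# section and the model completes the Response section.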
prompt = "### Instruction:\nContinue a dad joke:\n\n### Input:\n{input}\n\n### Response:\n"

def make_response(message, history):
    # Fill the user's message into the prompt template and generate a completion.
    filled_prompt = prompt.format(input=message)
    generated = generator(filled_prompt, return_full_text=False, max_new_tokens=128, num_return_sequences=1)
    return generated[0]["generated_text"]


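# Serve the joke generator as a Gradio chat UI.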
if __name__ == "__main__":
    gr.ChatInterface(make_response).launch()
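# To try it locally (assuming gradio, transformers, torch, and accelerate are
# installed): run this script with Python and open the local URL that Gradio prints.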