File size: 552 Bytes
99dc2e8
3ca8079
 
 
 
 
 
 
d6eb62f
3ca8079
 
 
 
 
 
f5249eb
3ca8079
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import gradio as gr
import torch

# Load the mGPT-13B model only when a CUDA GPU is present; the 13B model is
# impractical to run on CPU, so CPU-only hosts get a sentinel instead.
if torch.cuda.is_available():
    tokenizer = AutoTokenizer.from_pretrained("ai-forever/mGPT-13B")
    # 8-bit quantization halves/quarters the memory footprint so 13B params fit
    # on a single GPU; device_map="auto" lets accelerate place the weights.
    # NOTE(review): the load_in_8bit= kwarg is deprecated in newer transformers
    # in favor of quantization_config=BitsAndBytesConfig(load_in_8bit=True).
    model = AutoModelForCausalLM.from_pretrained("ai-forever/mGPT-13B", load_in_8bit=True, device_map="auto")
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
else:
    # Bind the name explicitly so predict() can detect the missing model
    # instead of dying with a NameError on CPU-only hosts.
    pipe = None

@spaces.GPU
def predict(text):
    """Generate a text continuation of *text* with the mGPT-13B pipeline.

    Args:
        text: The prompt string entered in the Gradio textbox.

    Returns:
        The generated text (prompt + continuation) as a plain string.

    Raises:
        gr.Error: When no CUDA GPU was available at startup, so the
            generation pipeline was never created.
    """
    # globals().get covers both failure modes: `pipe` never bound (original
    # GPU-only load path) or bound to the None sentinel.
    generator = globals().get("pipe")
    if generator is None:
        raise gr.Error("A CUDA GPU is required: the mGPT-13B pipeline was not loaded.")
    # The text-generation pipeline returns [{"generated_text": ...}]; unwrap it
    # so the output textbox shows the text rather than the list's repr.
    return generator(text)[0]["generated_text"]

# Minimal Gradio UI: one text input wired straight to predict, one text output.
demo = gr.Interface(fn=predict, inputs=["text"], outputs=["text"])

# Start the web server (blocks until the app is stopped).
demo.launch()