Stefan Grandl committed · commit 91c6835 · 1 parent: 4050fe9
switched to Llama2 model and tokenizer
app.py CHANGED
@@ -1,14 +1,17 @@
 import gradio as gr
 import torch
-from transformers import AutoModelForCausalLM, AutoTokenizer
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          LlamaForCausalLM, LlamaTokenizer)
 
-title = "🤖AI ChatBot"
+title = "🤖AI HeavyMetal-ChatBot"
 description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
 examples = [["How are you?"]]
 
 
-tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
-model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+# tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
+# model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+tokenizer = LlamaTokenizer.from_pretrained("hf-internal-testing/llama-tokenizer")
+model = LlamaForCausalLM.from_pretrained("hf-internal-testing/llama-tokenizer")
 
 
 def predict(input, history=[]):
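For context: the repo id hf-internal-testing/llama-tokenizer used above hosts only tokenizer files, so the LlamaForCausalLM.from_pretrained call in this commit will most likely fail for lack of model weights, and the title/description still advertise DialoGPT. The sketch below shows one way the rest of app.py could be wired up against a real Llama 2 checkpoint; the model id meta-llama/Llama-2-7b-chat-hf, the predict body, and the Interface wiring are illustrative assumptions, not part of this commit (the diff only shows the truncated def predict(input, history=[]): line).

import gradio as gr
import torch
from transformers import LlamaForCausalLM, LlamaTokenizer

title = "🤖AI HeavyMetal-ChatBot"
description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
examples = [["How are you?"]]

# Assumed checkpoint for illustration; it is gated and requires accepting
# Meta's license on the Hugging Face Hub. Device/dtype handling is omitted.
model_id = "meta-llama/Llama-2-7b-chat-hf"
tokenizer = LlamaTokenizer.from_pretrained(model_id)
model = LlamaForCausalLM.from_pretrained(model_id)


def predict(input, history=[]):
    # history is the list of (user, bot) string pairs kept in the Gradio state.
    # Rebuild a plain-text prompt from it; proper Llama-2-chat [INST] formatting
    # is omitted to keep the sketch short.
    prompt = ""
    for user, bot in history:
        prompt += f"User: {user}\nAssistant: {bot}\n"
    prompt += f"User: {input}\nAssistant:"
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    output_ids = model.generate(
        input_ids, max_new_tokens=200, pad_token_id=tokenizer.eos_token_id
    )
    # Everything generated after the prompt is the assistant's reply.
    reply = tokenizer.decode(
        output_ids[0, input_ids.shape[-1]:], skip_special_tokens=True
    ).strip()
    history = history + [(input, reply)]
    # The chatbot component and the state both receive the full pair list.
    return history, history


gr.Interface(
    fn=predict,
    title=title,
    description=description,
    examples=examples,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
).launch()

Even with a working checkpoint, the description string inherited from the DialoGPT version would be worth updating, and the plain User/Assistant prompt above is a stand-in for Llama 2's actual chat template.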