Commit ba9c49b · Parent(s): 91c6835
Update app.py
app.py CHANGED
@@ -1,6 +1,6 @@
 import gradio as gr
 import torch
-from transformers import (AutoModelForCausalLM, AutoTokenizer,
+from transformers import (AutoModel, AutoModelForCausalLM, AutoTokenizer,
                           LlamaForCausalLM, LlamaTokenizer)
 
 title = "🤖AI HeavyMetal-ChatBot"
@@ -10,8 +10,12 @@ examples = [["How are you?"]]
 
 # tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
 # model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
-tokenizer = LlamaTokenizer.from_pretrained("hf-internal-testing/llama-tokenizer")
-model = LlamaForCausalLM.from_pretrained("hf-internal-testing/llama-tokenizer")
+# tokenizer = LlamaTokenizer.from_pretrained("hf-internal-testing/llama-tokenizer")
+# model = LlamaForCausalLM.from_pretrained("hf-internal-testing/llama-tokenizer")
+
+model = "meta-llama/Llama-2-7b-chat-hf"
+
+tokenizer = AutoTokenizer.from_pretrained(model)
 
 
 def predict(input, history=[]):
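For context (not shown in this commit): after the change, `model` holds only the checkpoint id string and just the tokenizer is loaded. The lines below are a hedged sketch of how the rest of app.py could load the weights and answer a turn inside predict(), assuming standard AutoModelForCausalLM / generate() usage and a hypothetical tuple-based chat history; it is not the commit's actual code.

# Hedged sketch, not from the commit. Assumptions are marked inline; the commit
# itself only sets the checkpoint id and loads the tokenizer.
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model = "meta-llama/Llama-2-7b-chat-hf"           # checkpoint id set by the commit
tokenizer = AutoTokenizer.from_pretrained(model)  # tokenizer load set by the commit

# Assumption: the weights are loaded separately under another name.
llm = AutoModelForCausalLM.from_pretrained(
    model,
    torch_dtype=torch.float16,  # assumption: half precision to fit typical GPUs
    device_map="auto",          # assumption: requires `accelerate` for placement
)

def predict(input, history=[]):
    # Assumption: history is a list of (user, bot) tuples as used by gr.Chatbot.
    inputs = tokenizer(input, return_tensors="pt").to(llm.device)
    output_ids = llm.generate(**inputs, max_new_tokens=128)
    reply = tokenizer.decode(
        output_ids[0][inputs["input_ids"].shape[-1]:],  # drop the echoed prompt
        skip_special_tokens=True,
    )
    history.append((input, reply))
    return history, history

# Assumption: a simple Interface wiring that matches the predict() signature above.
gr.Interface(fn=predict, inputs=["text", "state"], outputs=["chatbot", "state"]).launch()

Note that this sketch skips the Llama-2 chat prompt template ([INST] ... [/INST]); a production chat app would wrap each user turn in that format before calling generate().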