Update app.py
app.py CHANGED
@@ -1,12 +1,15 @@
 import gradio as gr
 import random
+import os
+from huggingface_hub import login
 from transformers import pipeline
 from transformers import GPT2Tokenizer, GPT2LMHeadModel
+login(os.environ["HF_TOKEN"])
 #https://huggingface.co/facebook/opt-1.3b
 #generator = pipeline('text-generation', model="microsoft/DialoGPT-medium")
 tokenizer = GPT2Tokenizer.from_pretrained('microsoft/DialoGPT-medium')
 #model = GPT2LMHeadModel.from_pretrained('microsoft/DialoGPT-medium')
-model = GPT2LMHeadModel.from_pretrained('zmbfeng/FineTune-1'
+model = GPT2LMHeadModel.from_pretrained('zmbfeng/FineTune-1')
 
 def create_response(input_str):
     #output_raw= generator(input_str)
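For context, a minimal sketch of how the code touched by this diff could fit together: the HF_TOKEN secret authenticates the download of the fine-tuned checkpoint, and the DialoGPT tokenizer plus the zmbfeng/FineTune-1 model generate a reply inside create_response. The body of create_response and the generation parameters below are assumptions for illustration, not the Space's actual code.

import os

from huggingface_hub import login
from transformers import GPT2Tokenizer, GPT2LMHeadModel

# Authenticate so from_pretrained() can pull the fine-tuned repo;
# HF_TOKEN would be set as a secret in the Space settings (assumption).
login(os.environ["HF_TOKEN"])

tokenizer = GPT2Tokenizer.from_pretrained('microsoft/DialoGPT-medium')
model = GPT2LMHeadModel.from_pretrained('zmbfeng/FineTune-1')

def create_response(input_str):
    # Encode the prompt and append the end-of-sequence token, as DialoGPT-style
    # chat models expect; sampling settings here are illustrative only.
    input_ids = tokenizer.encode(input_str + tokenizer.eos_token, return_tensors="pt")
    output_ids = model.generate(
        input_ids,
        max_length=200,
        do_sample=True,
        top_p=0.95,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Return only the newly generated tokens, not the echoed prompt.
    return tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)

In a Gradio Space this function would presumably be wired up with something like gr.Interface(fn=create_response, inputs="text", outputs="text").launch(), but that part of app.py is not shown in this diff.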