Renegadesoffun committed on
Commit
861d880
·
1 Parent(s): 01e5d9a

Updated the chatbot to use the DialoGPT-small model

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -1,13 +1,14 @@
  import streamlit as st
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

- # Initialize the model and tokenizer
- model_name = "TheBloke/Mistral-7B-OpenOrca-GGUF"
- base_model_name = "EleutherAI/gpt-j-1.1B" # Base model for tokenizer
- tokenizer = AutoTokenizer.from_pretrained("TheBloke/Mistral-7B-OpenOrca-GGUF")
+ # Use a smaller model
+ model_name = "microsoft/DialoGPT-small"
+
+ # Use the model's default GPT2 tokenizer
+ tokenizer = AutoTokenizer.from_pretrained(model_name)

  model = AutoModelForCausalLM.from_pretrained(model_name)
- chat_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0) # device=0 means use CPU
+ chat_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0, truncation=True, max_length=1000)

  # Streamlit UI
  st.title("Buddy Christ Chatbot 🌟")