Update app.py
app.py
CHANGED
@@ -2,9 +2,10 @@ import streamlit as st
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch

-# Load the
-
-
+# Load the Zephyr-7B-Alpha model (fully open and optimized for instruction-following)
+MODEL_NAME = "HuggingFaceH4/zephyr-7b-alpha"
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype=torch.float16, device_map="auto")

 # Initialize conversation history if not present
 if "conversation" not in st.session_state:
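For context, a minimal sketch of how the model loaded above might be used later in app.py to produce a reply from the stored conversation. The rest of the file is not shown in this diff, so the generate_reply helper and its generation settings are hypothetical, not the Space's actual code:

# Hypothetical helper illustrating use of the Zephyr model loaded above.
# The surrounding Streamlit chat logic is not part of this diff and is assumed here.
def generate_reply(conversation):
    # Zephyr-7B-Alpha ships a chat template; apply_chat_template formats the
    # conversation (a list of {"role": ..., "content": ...} dicts) as the model expects.
    inputs = tokenizer.apply_chat_template(
        conversation,
        add_generation_prompt=True,
        return_tensors="pt",
    ).to(model.device)

    # Sample a response; these generation settings are illustrative defaults.
    output_ids = model.generate(
        inputs,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.7,
        top_p=0.95,
    )

    # Decode only the newly generated tokens (everything after the prompt).
    return tokenizer.decode(output_ids[0][inputs.shape[-1]:], skip_special_tokens=True)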