Update app.py
app.py CHANGED
@@ -12,19 +12,24 @@ api_key = os.getenv("api_key")
 st.title("I am Your GrowBuddy 🌱")
 st.write("Let me help you start gardening. Let's grow together!")
 
-# Function to load model
+# Function to load model only once
 def load_model():
     try:
-        tokenizer
-
-
-
-
+        # If model and tokenizer are already in session state, return them
+        if "tokenizer" in st.session_state and "model" in st.session_state:
+            return st.session_state.tokenizer, st.session_state.model
+        else:
+            tokenizer = AutoTokenizer.from_pretrained("KhunPop/Gardening")
+            model = AutoModelForCausalLM.from_pretrained("unsloth/gemma-2-2b")
+            # Store the model and tokenizer in session state
+            st.session_state.tokenizer = tokenizer
+            st.session_state.model = model
+            return tokenizer, model
     except Exception as e:
         st.error(f"Failed to load model: {e}")
         return None, None
 
-# Load model and tokenizer
+# Load model and tokenizer (cached)
 tokenizer, model = load_model()
 
 if not tokenizer or not model:
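Note: Streamlit's built-in st.cache_resource decorator is an alternative to the manual st.session_state bookkeeping in this commit; it keeps a single shared copy of the loaded objects across reruns instead of one copy per user session. A minimal sketch of that variant (not part of this commit; it reuses the model identifiers shown in the diff above):

import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

@st.cache_resource  # cache the returned objects across reruns and sessions
def load_model():
    try:
        tokenizer = AutoTokenizer.from_pretrained("KhunPop/Gardening")
        model = AutoModelForCausalLM.from_pretrained("unsloth/gemma-2-2b")
        return tokenizer, model
    except Exception as e:
        st.error(f"Failed to load model: {e}")
        return None, None

tokenizer, model = load_model()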