Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -2,15 +2,17 @@ import streamlit as st
|
|
2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
import os
|
|
|
5 |
|
|
|
6 |
# App title and description
|
7 |
st.title("I am Your GrowBuddy 🌱")
|
8 |
st.write("Let me help you start gardening. Let's grow together!")
|
9 |
|
10 |
def load_model():
|
11 |
try:
|
12 |
-
tokenizer = AutoTokenizer.from_pretrained("KhunPop/Gardening")
|
13 |
-
model = AutoModelForCausalLM.from_pretrained("google/gemma-2b-it")
|
14 |
return tokenizer, model
|
15 |
except Exception as e:
|
16 |
st.error(f"Failed to load model: {e}")
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
import torch
|
4 |
import os
|
5 |
+
load_dotenv()
|
6 |
|
7 |
+
api_key = os.getenv("api_key")
|
8 |
# App title and description
|
9 |
st.title("I am Your GrowBuddy 🌱")
|
10 |
st.write("Let me help you start gardening. Let's grow together!")
|
11 |
|
12 |
def load_model():
|
13 |
try:
|
14 |
+
tokenizer = AutoTokenizer.from_pretrained("KhunPop/Gardening", use_auth_token=api_key)
|
15 |
+
model = AutoModelForCausalLM.from_pretrained("google/gemma-2b-it", use_auth_token=api_key)
|
16 |
return tokenizer, model
|
17 |
except Exception as e:
|
18 |
st.error(f"Failed to load model: {e}")
|