CosmoAI committed (verified)
Commit 024f641 · 1 Parent(s): 0f58c4b

Update app.py

Files changed (1):
  1. app.py +65 -30
app.py CHANGED
@@ -1,48 +1,83 @@
-
- import streamlit as st
- import json
- import google.generativeai as genai
-
-
- GOOGLE_API_KEY = "AIzaSyCUBaL7TdISL7lRuBy19_X0-OsZfgbIgEc"
- genai.configure(api_key=GOOGLE_API_KEY)
- model = genai.GenerativeModel('gemini-pro')
-
- def add_to_json(goal):
-     try:
-         with open("test.json", "r") as file:
-             data = json.load(file)
-     except FileNotFoundError:
-         data = {"goals": []}  # Create the file with an empty 'goals' list if it doesn't exist
-
-     new_item = {"Goal": goal}
-     data["goals"].append(new_item)
-
-     with open("test.json", "w") as file:
-         json.dump(data, file, indent=4)
-
-
-
- def main():
-     if prompt := st.chat_input("Hi, how can I help you?"):
-         goals_prompt = f"""Act as a personal assistant... {prompt} """
-         completion = model.generate_content(goals_prompt)
-         add_to_json(prompt)
-
-         with st.chat_message("Assistant"):
-             st.write(completion.text)
-
-
-
-     # Display JSON Data
-     if st.button("Show JSON Data"):
-         with open("test.json", "r") as file:
-             data = json.load(file)
-         st.json(data)  # Streamlit's way to display JSON
-
-
- if __name__ == "__main__":
-     main()
-
-
-
+ import streamlit as st
+ import torch
+ from diffusers import StableDiffusionXLPipeline, UNet2DConditionModel, EulerDiscreteScheduler
+ from huggingface_hub import hf_hub_download
+ from safetensors.torch import load_file
+
+
+
+ # Model Path/Repo Information
+ base = "stabilityai/stable-diffusion-xl-base-1.0"
+ repo = "ByteDance/SDXL-Lightning"
+ ckpt = "sdxl_lightning_4step_unet.safetensors"
+
+ # Load model (Executed only once for efficiency)
+ @st.cache_resource
+ def load_sdxl_pipeline():
+     unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cpu", torch.float16)
+     unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device="cpu"))
+     pipe = StableDiffusionXLPipeline.from_pretrained(base, unet=unet, torch_dtype=torch.float16, variant="fp16").to("cpu")
+     pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
+     return pipe
+
+
+ # Streamlit UI
+ st.title("Image Generation")
+ prompt = st.text_input("Enter your image prompt:")
+
+ if st.button("Generate Image"):
+     if not prompt:
+         st.warning("Please enter a prompt.")
+     else:
+         pipe = load_sdxl_pipeline()  # Load the pipeline from cache
+         with torch.no_grad():
+             image = pipe(prompt).images[0]
+
+         st.image(image)
+
+
+
+
+
+ # GOOGLE_API_KEY = ""
+ # genai.configure(api_key=GOOGLE_API_KEY)
+ # model = genai.GenerativeModel('gemini-pro')
+
+ # def add_to_json(goal):
+ #     try:
+ #         with open("test.json", "r") as file:
+ #             data = json.load(file)
+ #     except FileNotFoundError:
+ #         data = {"goals": []}  # Create the file with an empty 'goals' list if it doesn't exist
+
+ #     new_item = {"Goal": goal}
+ #     data["goals"].append(new_item)
+
+ #     with open("test.json", "w") as file:
+ #         json.dump(data, file, indent=4)
+
+
+
+ # def main():
+ #     if prompt := st.chat_input("Hi, how can I help you?"):
+ #         goals_prompt = f"""Act as a personal assistant... {prompt} """
+ #         completion = model.generate_content(goals_prompt)
+ #         add_to_json(prompt)
+
+ #         with st.chat_message("Assistant"):
+ #             st.write(completion.text)
+
+
+
+ #     # Display JSON Data
+ #     if st.button("Show JSON Data"):
+ #         with open("test.json", "r") as file:
+ #             data = json.load(file)
+ #         st.json(data)  # Streamlit's way to display JSON
+
+
+ # if __name__ == "__main__":
+ #     main()
+
+
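
Note on the new inference path: the sdxl_lightning_4step_unet.safetensors checkpoint is distilled for exactly 4 sampling steps with guidance disabled, and float16 weights are generally only practical on a GPU, so calling pipe(prompt) with default settings on a float16 CPU pipeline is likely to fail or be extremely slow. Below is a minimal, device-aware sketch (not the committed code), reusing the same base/repo/ckpt names from the diff; the num_inference_steps=4 and guidance_scale=0 settings follow the SDXL-Lightning model card.

import torch
from diffusers import StableDiffusionXLPipeline, UNet2DConditionModel, EulerDiscreteScheduler
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

base = "stabilityai/stable-diffusion-xl-base-1.0"
repo = "ByteDance/SDXL-Lightning"
ckpt = "sdxl_lightning_4step_unet.safetensors"

# Use float16 only when a GPU is available; fall back to float32 on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
dtype = torch.float16 if device == "cuda" else torch.float32

# Build the UNet from the base config and load the distilled Lightning weights into it.
unet = UNet2DConditionModel.from_config(base, subfolder="unet").to(device, dtype)
unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device=device))

pipe = StableDiffusionXLPipeline.from_pretrained(
    base,
    unet=unet,
    torch_dtype=dtype,
    variant="fp16" if device == "cuda" else None,
).to(device)

# Lightning checkpoints expect "trailing" timestep spacing.
pipe.scheduler = EulerDiscreteScheduler.from_config(
    pipe.scheduler.config, timestep_spacing="trailing"
)

# The 4-step checkpoint is meant to run with 4 steps and guidance turned off.
image = pipe("an astronaut riding a horse",
             num_inference_steps=4, guidance_scale=0).images[0]
image.save("output.png")

Inside the Streamlit app, the same num_inference_steps=4 and guidance_scale=0 arguments can simply be added to the existing pipe(prompt) call, and the device/dtype choice folded into load_sdxl_pipeline().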