warhawkmonk committed on
Commit fc0922c · verified · 1 Parent(s): 5f1b674

Update app.py

Files changed (1)
  1. app.py +8 -8
app.py CHANGED
@@ -98,10 +98,10 @@ def image_to_base64(image_path):
         return base64.b64encode(img_file.read()).decode()
 
 
-# @st.cache_resource
-# def load_model():
-#     pipeline_ = AutoPipelineForInpainting.from_pretrained("kandinsky-community/kandinsky-2-2-decoder-inpaint", torch_dtype=torch.float16).to("cuda")
-#     return pipeline_
+@st.cache_resource
+def load_model():
+    pipeline_ = AutoPipelineForInpainting.from_pretrained("kandinsky-community/kandinsky-2-2-decoder-inpaint", torch_dtype=torch.float16).to("cuda")
+    return pipeline_
 
 # @st.cache_resource
 def prompt_improvment(pre_prompt):
@@ -135,10 +135,10 @@ def numpy_to_list(array):
 
 
 
-# @st.cache_resource
-# def llm_text_response():
-#     llm = Ollama(model="llama3:latest",num_ctx=1000)
-#     return llm.stream
+@st.cache_resource
+def llm_text_response():
+    llm = Ollama(model="llama3:latest",num_ctx=1000)
+    return llm.stream
 
 def model_single_out(prompt):
     pipe=load_model()
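For context, re-enabling `@st.cache_resource` means the inpainting pipeline and the Ollama client are constructed once per server process and reused across Streamlit reruns instead of being rebuilt on every interaction. The sketch below illustrates that usage pattern; only `load_model()` and `llm_text_response()` mirror the functions restored in this commit, while the import paths, the `st.text_input` widget, and the `st.write_stream` call are illustrative assumptions, not part of app.py as shown in the diff.

# Minimal sketch of the caching pattern enabled by this commit (assumptions noted inline).
import streamlit as st
import torch
from diffusers import AutoPipelineForInpainting
from langchain_community.llms import Ollama  # assumed import path for Ollama


@st.cache_resource  # built once per process, shared across reruns and sessions
def load_model():
    pipeline_ = AutoPipelineForInpainting.from_pretrained(
        "kandinsky-community/kandinsky-2-2-decoder-inpaint",
        torch_dtype=torch.float16,
    ).to("cuda")
    return pipeline_


@st.cache_resource  # the Ollama client is created once; its stream method is reused
def llm_text_response():
    llm = Ollama(model="llama3:latest", num_ctx=1000)
    return llm.stream


prompt = st.text_input("Prompt")  # hypothetical widget, for illustration only
if prompt:
    stream = llm_text_response()     # cached: no new Ollama client per rerun
    st.write_stream(stream(prompt))  # stream the LLM tokens into the page
    pipe = load_model()              # cached: the pipeline is not reloaded to the GPU

Without the decorators, each Streamlit rerun would reload the Kandinsky weights onto the GPU and recreate the Ollama client, which is what the previously commented-out code avoided enabling.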