Deepak Sahu committed
Commit 0b99bf5 · 1 Parent(s): ebf589b

testing gradio caching

Files changed (2)
  1. z_embedding.py +5 -1
  2. z_hypothetical_summary.py +3 -1
z_embedding.py CHANGED
@@ -11,7 +11,11 @@ CACHE_SUMMARY_EMB_NPY = "app_cache/summary_vectors.npy"
 
 # Load Model
 # setting this at global level because entire runtime will continue to use this model.
-model = SentenceTransformer(EMB_MODEL)
+
+import gradio as gr
+
+if gr.NO_RELOAD:
+    model = SentenceTransformer(EMB_MODEL)
 
 
 
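The change wraps the SentenceTransformer load in Gradio's `gr.NO_RELOAD` guard: blocks under this flag are skipped when the app is launched with the `gradio` CLI and the watcher re-executes the file on a source change, so the embedding model is only instantiated once per process instead of on every hot reload. Below is a minimal, self-contained sketch of the same pattern, assuming Gradio ≥ 4.x; the `EMB_MODEL` value and the `embed_summaries` helper are illustrative assumptions, not code from this repository.

```python
# sketch.py -- illustrative only; EMB_MODEL value and embed_summaries() are assumed
import numpy as np
import gradio as gr
from sentence_transformers import SentenceTransformer

EMB_MODEL = "all-MiniLM-L6-v2"  # assumed checkpoint; the real constant is defined earlier in z_embedding.py

# Skipped when `gradio sketch.py` re-executes this file after a source change,
# so the SentenceTransformer weights are loaded only once per worker process.
if gr.NO_RELOAD:
    model = SentenceTransformer(EMB_MODEL)

def embed_summaries(texts: list[str]) -> np.ndarray:
    """Hypothetical helper: encode texts with the globally cached model."""
    return model.encode(texts, show_progress_bar=False)

if __name__ == "__main__":
    print(embed_summaries(["a space opera about a lost generation ship"]).shape)
```

Launching the file with plain `python` behaves as before; the guard only changes what happens under the `gradio` hot-reload watcher.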
z_hypothetical_summary.py CHANGED
@@ -8,8 +8,10 @@ from transformers import pipeline, set_seed
 set_seed(42)
 TRAINED_CASUAL_MODEL = "LunaticMaestro/gpt2-book-summary-generator"
 
+import gradio as gr
 
-generator_model = pipeline('text-generation', model=TRAINED_CASUAL_MODEL)
+if gr.NO_RELOAD:
+    generator_model = pipeline('text-generation', model=TRAINED_CASUAL_MODEL)
 
 
 def generate_summaries(book_title: str, genre: Optional[str] = None, n_samples=2, top_k = 50, top_p = 0.85, ) -> list[str]:
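The same guard is applied to the GPT-2 summary generator so the Hugging Face pipeline is built once and reused across hot reloads. The sketch below shows how the cached `generator_model` would typically be consumed by `generate_summaries`; only the signature comes from the diff above, while the prompt format and the function body are assumptions for illustration, not the repository's actual implementation.

```python
# sketch, assuming gradio >= 4.x and transformers installed;
# the prompt template and function body below are guesses -- only the signature is from the diff
from typing import Optional
import gradio as gr
from transformers import pipeline, set_seed

set_seed(42)
TRAINED_CASUAL_MODEL = "LunaticMaestro/gpt2-book-summary-generator"

# Skipped during Gradio's hot-reload re-execution, so the pipeline is built once per process.
if gr.NO_RELOAD:
    generator_model = pipeline('text-generation', model=TRAINED_CASUAL_MODEL)

def generate_summaries(book_title: str, genre: Optional[str] = None,
                       n_samples=2, top_k=50, top_p=0.85) -> list[str]:
    """Hypothetical body: sample n_samples candidate summaries for a book title."""
    prompt = f"{book_title} ({genre})" if genre else book_title  # assumed prompt format
    outputs = generator_model(
        prompt,
        do_sample=True,
        top_k=top_k,
        top_p=top_p,
        num_return_sequences=n_samples,
        max_new_tokens=128,
    )
    return [o["generated_text"] for o in outputs]
```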