ragV98 committed
Commit e2dc38b · Parent: 0820a6b

rebuild trigger

components/generators/daily_feed.py CHANGED

@@ -31,7 +31,7 @@ TOPIC_KEYS = [t.lower().replace(" news", "") for t in TOPICS]
 # Removed the "numbered headline" instruction as we want LLM to just give plain headlines
 BASE_PROMPT = (
     "You are Nuse’s editorial summarizer. Read the excerpts below and extract the most important stories. "
-    "Return up to 3 punchy headlines, each under 20 words. Each headline should be followed by a short explanation of why the story matters."
+    "Return up to 5 punchy headlines, each under 20 words. Each headline should be followed by a short explanation of why the story matters."
     "Don't include unnecessary text like 'this is important because' or 'why this matters because..' just state the logic and nothing else."
 )

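The string literals inside BASE_PROMPT are joined by Python's implicit concatenation of adjacent literals, so the whole block reaches the model as one instruction string. A minimal, hypothetical sketch of how the prompt might be combined with retrieved excerpts before the LLM call; build_daily_feed_prompt and the "Excerpts" framing are illustrative names, not part of this commit:

# Hypothetical helper, not from the commit: one way BASE_PROMPT could be
# joined with per-topic excerpts before being sent to the summarizer LLM.
def build_daily_feed_prompt(base_prompt: str, excerpts: list[str]) -> str:
    # Adjacent string literals in BASE_PROMPT concatenate into a single
    # instruction block, so base_prompt arrives here as one string.
    body = "\n\n".join(excerpts)
    return f"{base_prompt}\n\nExcerpts:\n{body}"
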
@@ -41,7 +41,7 @@ def load_docs_by_topic_with_refs() -> Dict[str, List[Dict]]:
     logging.info("Starting to load documents by topic from Upstash Vector Store...")
     try:
         vector_store = get_upstash_vector_store()
-        for full_topic_name, topic_key in zip(TOPICS, TOPIC_KEYS):
+        for topic_key in zip(TOPICS, TOPIC_KEYS):
            filters = MetadataFilters(
                filters=[MetadataFilter(key="topic", value=topic_key, operator=FilterOperator.EQ)]
            )
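
Note that the two loop headers bind topic_key differently: zip(TOPICS, TOPIC_KEYS) yields (topic, key) tuples, which the old header unpacked into two names. A standalone sketch with placeholder topic names (the module's real TOPICS list is defined near the top of the file):

# Standalone sketch; the two topics below are placeholders, not the module's data.
TOPICS = ["World News", "Tech News"]
TOPIC_KEYS = [t.lower().replace(" news", "") for t in TOPICS]

# Old header: each (topic, key) pair is unpacked into two names.
for full_topic_name, topic_key in zip(TOPICS, TOPIC_KEYS):
    print(topic_key)   # "world", then "tech"

# New header: each pair is bound whole, so topic_key is a tuple.
for topic_key in zip(TOPICS, TOPIC_KEYS):
    print(topic_key)   # ("World News", "world"), then ("Tech News", "tech")
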
@@ -197,7 +197,7 @@ def get_cached_daily_feed():
         logging.info(f"ℹ️ No cached data found under key '{cache_key}'.")
         return []
     except Exception as e:
-        logging.error(f"❌ [get_cached_daily_feed Error]: {e}", exc_info=True)
+        logging.error(f"❌ [get_cached_daily_feed error]: {e}", exc_info=True)
         return []

 # 🧪 Run if main
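
For reference, exc_info=True in the handler above makes logging.error attach the active exception's traceback to the log record. A self-contained illustration; the raised KeyError is a stand-in for whatever the real cache read might throw:

import logging

def cached_feed_demo():
    try:
        raise KeyError("daily_feed_cache")  # stand-in for a failed cache read
    except Exception as e:
        # exc_info=True appends the current traceback to the logged message.
        logging.error(f"❌ [get_cached_daily_feed error]: {e}", exc_info=True)
        return []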