Commit diff: `generate_and_cache_daily_feed` now accepts a `documents` list and builds the index internally; the caller in `pipeline/news_ingest.py` is updated to pass `documents`.
Browse files
components/generators/daily_feed.py
CHANGED
@@ -9,6 +9,7 @@ import redis
|
|
9 |
from typing import List, Dict
|
10 |
from llama_index.core import VectorStoreIndex
|
11 |
from llama_index.core.query_engine import RetrieverQueryEngine
|
|
|
12 |
from components.indexers.news_indexer import load_news_index
|
13 |
|
14 |
# Load environment variables
|
@@ -64,8 +65,8 @@ def summarize_topic(docs: List[str], topic: str) -> List[Dict]:
|
|
64 |
return feed
|
65 |
|
66 |
# Main generation pipeline
|
67 |
-
def generate_and_cache_daily_feed():
|
68 |
-
index
|
69 |
query_engine = RetrieverQueryEngine.from_args(index)
|
70 |
|
71 |
final_feed = []
|
|
|
9 |
from typing import List, Dict
|
10 |
from llama_index.core import VectorStoreIndex
|
11 |
from llama_index.core.query_engine import RetrieverQueryEngine
|
12 |
+
from llama_index.core.schema import Document
|
13 |
from components.indexers.news_indexer import load_news_index
|
14 |
|
15 |
# Load environment variables
|
|
|
65 |
return feed
|
66 |
|
67 |
# Main generation pipeline
|
68 |
+
def generate_and_cache_daily_feed(documents: List[Document]):
|
69 |
+
index = VectorStoreIndex.from_documents(documents)
|
70 |
query_engine = RetrieverQueryEngine.from_args(index)
|
71 |
|
72 |
final_feed = []
|
pipeline/news_ingest.py
CHANGED
@@ -142,7 +142,7 @@ async def main():
|
|
142 |
get_or_build_index_from_docs(documents)
|
143 |
|
144 |
print("⚡ Generating daily feed...")
|
145 |
-
await generate_and_cache_daily_feed()
|
146 |
|
147 |
print(f"✅ Indexed, headlines generated, and stored at: {INDEX_DIR}")
|
148 |
|
|
|
142 |
get_or_build_index_from_docs(documents)
|
143 |
|
144 |
print("⚡ Generating daily feed...")
|
145 |
+
await generate_and_cache_daily_feed(documents)
|
146 |
|
147 |
print(f"✅ Indexed, headlines generated, and stored at: {INDEX_DIR}")
|
148 |
|