Spaces:
Running
Running
updating requirements.txt implementing async database
Browse files- app/routers/analyze.py +4 -4
- requirements.txt +3 -3
app/routers/analyze.py
CHANGED
@@ -18,10 +18,10 @@ router = APIRouter(tags=["analysis"])
|
|
18 |
scraper = ArticleScraper()
|
19 |
scorer = MediaScorer()
|
20 |
|
21 |
-
# Initialize Supabase connection
|
22 |
SUPABASE_URL = os.getenv("SUPABASE_URL")
|
23 |
SUPABASE_KEY = os.getenv("SUPABASE_KEY")
|
24 |
-
supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
|
25 |
|
26 |
class ArticleRequest(BaseModel):
|
27 |
url: HttpUrl
|
@@ -65,7 +65,7 @@ async def analyze_article(request: ArticleRequest) -> AnalysisResponse:
|
|
65 |
logger.info(f"Analyzing article: {request.url}")
|
66 |
|
67 |
# Check if the article has already been analyzed
|
68 |
-
existing_article = supabase.table('article_analysis').select('*').eq('url', str(request.url)).execute()
|
69 |
|
70 |
if existing_article.status_code == 200 and existing_article.data:
|
71 |
logger.info("Article already analyzed. Returning cached data.")
|
@@ -127,7 +127,7 @@ async def analyze_article(request: ArticleRequest) -> AnalysisResponse:
|
|
127 |
}
|
128 |
|
129 |
# Save the new analysis to Supabase
|
130 |
-
supabase.table('article_analysis').upsert({
|
131 |
'url': str(request.url),
|
132 |
'headline': response_dict['headline'],
|
133 |
'content': response_dict['content'],
|
|
|
18 |
scraper = ArticleScraper()
|
19 |
scorer = MediaScorer()
|
20 |
|
21 |
+
# Initialize Supabase connection — NOTE(review): for async usage the client must come from acreate_client; the synchronous create_client used below does not support await
|
22 |
SUPABASE_URL = os.getenv("SUPABASE_URL")
|
23 |
SUPABASE_KEY = os.getenv("SUPABASE_KEY")
|
24 |
+
supabase = create_client(SUPABASE_URL, SUPABASE_KEY)  # NOTE(review): create_client returns a *synchronous* client — the 'await ...execute()' calls added in this diff will fail on it; build the client with 'await acreate_client(SUPABASE_URL, SUPABASE_KEY)' inside an async startup hook instead
|
25 |
|
26 |
class ArticleRequest(BaseModel):
|
27 |
url: HttpUrl
|
|
|
65 |
logger.info(f"Analyzing article: {request.url}")
|
66 |
|
67 |
# Check if the article has already been analyzed
|
68 |
+
existing_article = await supabase.table('article_analysis').select('*').eq('url', str(request.url)).execute()
|
69 |
|
70 |
if existing_article.data:  # NOTE(review): supabase-py's APIResponse has no .status_code attribute (request failures raise APIError), so the previous 'status_code == 200' check would raise AttributeError — checking .data is sufficient
|
71 |
logger.info("Article already analyzed. Returning cached data.")
|
|
|
127 |
}
|
128 |
|
129 |
# Save the new analysis to Supabase
|
130 |
+
await supabase.table('article_analysis').upsert({
|
131 |
'url': str(request.url),
|
132 |
'headline': response_dict['headline'],
|
133 |
'content': response_dict['content'],
|
requirements.txt
CHANGED
@@ -11,6 +11,6 @@ torch==2.1.2
|
|
11 |
numpy==1.26.3
|
12 |
pytest==7.4.3
|
13 |
pytest-asyncio==0.21.1
|
14 |
-
httpx==0.25.2
|
15 |
-
supabase
|
16 |
-
configparser>=6.0.0
|
|
|
11 |
numpy==1.26.3
|
12 |
pytest==7.4.3
|
13 |
pytest-asyncio==0.21.1
|
14 |
+
httpx==0.25.2  # supabase 2.x depends on httpx — keep this pin within the range required by the supabase release in use
|
15 |
+
supabase>=2.0.0  # NOTE(review): the current PyPI distribution is named "supabase"; "supabase-py" is the deprecated legacy name, so this rename would break installs — pin the working major version instead
|
16 |
+
configparser>=6.0.0
|