Omkar008 commited on
Commit
8aa59e6
·
verified ·
1 Parent(s): 9f88b7d

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +30 -28
main.py CHANGED
@@ -20,6 +20,36 @@ url: str = os.getenv('SUPABASE_URL')
20
  key: str = os.getenv('SUPABASE_KEY')
21
  supabase: Client = create_client(url, key)
22
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  async def process_batch_job(dataset: Dict[str, Any], batch_job_id: str):
24
  """
25
  Background task to process the batch job
@@ -101,31 +131,3 @@ async def process_batch_job(dataset: Dict[str, Any], batch_job_id: str):
101
  "error": str(e),
102
  "completed_at": datetime.utcnow().isoformat()
103
  }).eq({"batch_job_id": batch_job_id}).execute()
104
-
105
- @app.post("/test/v1")
106
- async def testv1(request: Request, background_tasks: BackgroundTasks):
107
- try:
108
- dataset = await request.json()
109
-
110
- # Create initial batch job record
111
- save_data = {
112
- 'batch_job_id': f"batch_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
113
- "batch_job_status": False,
114
- "created_at": datetime.utcnow().isoformat()
115
- }
116
-
117
- response = (
118
- supabase.table("batch_processing_details")
119
- .insert(save_data)
120
- .execute()
121
- )
122
-
123
- # Add processing to background tasks
124
- background_tasks.add_task(process_batch_job, dataset, save_data['batch_job_id'])
125
-
126
- return {'data': 'Batch job is scheduled!', 'batch_job_id': save_data['batch_job_id']},
127
-
128
-
129
- except Exception as e:
130
- return {'error': str(e)}
131
-
 
20
  key: str = os.getenv('SUPABASE_KEY')
21
  supabase: Client = create_client(url, key)
22
 
23
@app.post("/send/batch_processing")
async def testv1(request: Request, background_tasks: BackgroundTasks):
    """Accept a JSON dataset, record a batch-job row, and schedule background processing.

    Reads the request body as JSON (assumes the caller sends a JSON object —
    schema not validated here), inserts an initial tracking row into the
    ``batch_processing_details`` table with ``batch_job_status=False``, then
    queues :func:`process_batch_job` to run after the response is sent.

    Returns:
        dict: ``{'data': ..., 'batch_job_id': ...}`` on success, or
        ``{'error': <message>}`` if anything raised (still HTTP 200 —
        kept as-is for caller compatibility).
    """
    try:
        dataset = await request.json()

        # Timestamp-derived job id; second-resolution, so concurrent requests
        # within the same second would collide — TODO confirm acceptable.
        save_data = {
            'batch_job_id': f"batch_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}",
            "batch_job_status": False,
            "created_at": datetime.utcnow().isoformat()
        }

        # Persist the initial job record before scheduling work, so the
        # background task can update it by batch_job_id.
        supabase.table("batch_processing_details").insert(save_data).execute()

        # Run the heavy processing after the HTTP response is returned.
        background_tasks.add_task(process_batch_job, dataset, save_data['batch_job_id'])

        # FIX: the original ended this line with a trailing comma, turning the
        # response into a one-element tuple that FastAPI serializes as a JSON
        # array ([{...}]) instead of the intended JSON object.
        return {'data': 'Batch job is scheduled!', 'batch_job_id': save_data['batch_job_id']}

    except Exception as e:
        # Broad catch is deliberate best-effort error reporting; note it also
        # masks the HTTP status (always 200) — unchanged for compatibility.
        return {'error': str(e)}
51
+
52
+
53
  async def process_batch_job(dataset: Dict[str, Any], batch_job_id: str):
54
  """
55
  Background task to process the batch job
 
131
  "error": str(e),
132
  "completed_at": datetime.utcnow().isoformat()
133
  }).eq({"batch_job_id": batch_job_id}).execute()