saq1b committed · verified
Commit 4e4b096 · 1 Parent(s): 4b82132

Upload main.py

Files changed (1):
  1. main.py +184 -4

main.py CHANGED
@@ -40,6 +40,8 @@ app.add_middleware(
 SCAMMER_DWC_SPREADSHEET_ID = os.getenv('SCAMMER_DWC_SPREADSHEET_ID', '1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw')
 # Spreadsheet containing Value lists and Dupe list
 VALUES_DUPE_SPREADSHEET_ID = os.getenv('VALUES_DUPE_SPREADSHEET_ID', '1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU')
+# New spreadsheet for Teams and Servers data
+TEAMS_SERVERS_SPREADSHEET_ID = os.getenv('TEAMS_SERVERS_SPREADSHEET_ID', '1HWnvzPd4AtRdAroXnwQaN4Dcpj6lZbtnCBavqiAxokQ')

 SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
 
@@ -63,6 +65,12 @@ CATEGORIES = [
 ]
 VALUES_RANGE = 'B6:R' # Range within each category sheet including column R for lastUpdated

+# New Sheet Names and Ranges within TEAMS_SERVERS_SPREADSHEET_ID
+TEAMS_SHEET = "Our Teams Page"
+TEAMS_RANGE = "B6:K"
+SERVERS_SHEET = "Private Server Links"
+SERVERS_RANGE = "B6:F"
+
 # Cache Update Interval
 CACHE_UPDATE_INTERVAL_SECONDS = 60
 
@@ -84,6 +92,8 @@ cache = {
     "dwc": [],
     "trusted": [], # New cache key for trusted entries
     "dupes": [], # List of duped usernames
+    "teams": [], # New cache key for teams data
+    "servers": [], # New cache key for servers data
     "last_updated": None, # Timestamp of the last successful/partial update
     "is_ready": False, # Is the cache populated at least once?
     "service_available": True # Is the Google Sheets service reachable?
 
@@ -449,6 +459,57 @@ def process_dupe_list_data(values): # For Dupe List Sheet
             processed_dupes.append(username)
     return processed_dupes

+def process_teams_data(values): # For Teams Sheet
+    if not values: return []
+    processed_data = []
+    for row in values: # Expected range like B6:K
+        if not row or len(row) < 2: continue
+        # Indices based on B6:K (0-indexed from B)
+        discord_username = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
+        discord_user_id = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
+        # Skip if both identifiers are missing
+        if not discord_username and not discord_user_id: continue
+        # Skip if it looks like a header row
+        if str(discord_username).lower() == 'discord username' or str(discord_user_id).lower() == 'discord user id':
+            continue
+        processed_item = {
+            'discord_username': discord_username,
+            'discord_user_id': discord_user_id,
+            'position': clean_string(row[2]) if len(row) > 2 else 'N/A', # Col D
+            'second_position': clean_string_optional(row[3]) if len(row) > 3 else None, # Col E
+            'third_position': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
+            'roblox_username': clean_string_optional(row[5]) if len(row) > 5 else None, # Col G
+            'discord_server_link': clean_string_optional(row[6]) if len(row) > 6 else None, # Col H
+            'custom_bio': clean_string_optional(row[7]) if len(row) > 7 else None, # Col I
+            'likes': int(row[8]) if len(row) > 8 and row[8] and str(row[8]).isdigit() else 0, # Col J
+            'last_updated': clean_string_optional(row[9]) if len(row) > 9 else None, # Col K
+            'roblox_avatar_url': None # Will be filled later
+        }
+        processed_data.append(processed_item)
+    return processed_data
+
+def process_servers_data(values): # For Servers Sheet
+    if not values: return []
+    processed_data = []
+    for row in values: # Expected range like B6:F
+        if not row or len(row) < 1: continue
+        # Indices based on B6:F (0-indexed from B)
+        server_link = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
+        # Skip if server link is missing
+        if not server_link: continue
+        # Skip if it looks like a header row
+        if str(server_link).lower() == 'server link':
+            continue
+        processed_item = {
+            'server_link': server_link,
+            'server_owner': clean_string(row[1]) if len(row) > 1 else 'N/A', # Col C
+            'expires': clean_string_optional(row[2]) if len(row) > 2 else None, # Col D
+            'rules': clean_string_optional(row[3]) if len(row) > 3 else None, # Col E
+            'extra_notes': clean_string_optional(row[4]) if len(row) > 4 else None # Col F
+        }
+        processed_data.append(processed_item)
+    return processed_data
+

 # --- Async Fetching Functions ---
 
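Note on the new processors: a minimal sketch of how one row from the "Our Teams Page" sheet maps through process_teams_data, assuming the existing clean_string / clean_string_optional helpers (defined earlier in main.py, not shown in this diff) simply strip whitespace and return None for empty cells. The sample row values are made up.

    # Hypothetical row from 'Our Teams Page'!B6:K (index 0 == column B).
    sample_row = ["saq1b", "123456789012345678", "Owner", "", "", "saq1b_rbx",
                  "https://discord.gg/example", "Hello!", "42", "2024-01-01"]

    member = process_teams_data([sample_row])[0]
    assert member["discord_user_id"] == "123456789012345678"
    assert member["position"] == "Owner"
    assert member["likes"] == 42                # column J is parsed to int, defaulting to 0
    assert member["roblox_avatar_url"] is None  # filled in later by the avatar fetcher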
@@ -566,6 +627,8 @@ async def update_cache_periodically():
            "dwc": [],
            "trusted": [], # Add trusted key
            "dupes": [],
+            "teams": [], # Add teams key
+            "servers": [], # Add servers key
        }
        current_errors = {} # Track errors for specific fetches/sheets
 
@@ -590,6 +653,20 @@ async def update_cache_periodically():
            TRUSTED_SHEET: "trusted", # Add trusted target key
        }

+        # Define ranges for teams and servers
+        teams_servers_ranges = [
+            f"{quote_sheet_name(TEAMS_SHEET)}!{TEAMS_RANGE}",
+            f"{quote_sheet_name(SERVERS_SHEET)}!{SERVERS_RANGE}",
+        ]
+        teams_servers_processor_map = {
+            TEAMS_SHEET: process_teams_data,
+            SERVERS_SHEET: process_servers_data,
+        }
+        teams_servers_target_key_map = {
+            TEAMS_SHEET: "teams",
+            SERVERS_SHEET: "servers",
+        }
+
        values_dupes_ranges = [f"{quote_sheet_name(DUPE_LIST_SHEET)}!{DUPE_LIST_RANGE}"]
        values_dupes_ranges.extend([f"{quote_sheet_name(cat)}!{VALUES_RANGE}" for cat in CATEGORIES])
 
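Note: both new sheet names contain spaces, so the ranges rely on quote_sheet_name wrapping them in single quotes as Sheets A1 notation requires. Assuming that helper behaves like the sketch below (the escaping mirrors the `replace("''", "'")` un-escaping in the processing loop further down), the two ranges requested from this spreadsheet are:

    def quote_sheet_name_sketch(name: str) -> str:
        # Assumed behaviour of the existing quote_sheet_name helper (not shown in this diff):
        # single-quote the sheet name and double any embedded quotes, per A1 notation.
        return "'" + name.replace("'", "''") + "'"

    print(quote_sheet_name_sketch("Our Teams Page") + "!B6:K")        # 'Our Teams Page'!B6:K
    print(quote_sheet_name_sketch("Private Server Links") + "!B6:F")  # 'Private Server Links'!B6:F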
@@ -609,6 +686,23 @@ async def update_cache_periodically():
        # Add delay between sheet updates
        await asyncio.sleep(SHEET_UPDATE_DELAY_SECONDS)

+        # Now fetch teams/servers batch
+        fetch_tasks = {
+            "teams_batch": fetch_batch_ranges_async(
+                TEAMS_SERVERS_SPREADSHEET_ID,
+                teams_servers_ranges,
+                value_render_option='FORMATTED_VALUE'
+            )
+        }
+
+        # Execute teams/servers batch
+        teams_results = await asyncio.gather(*fetch_tasks.values(), return_exceptions=True)
+        task_keys.extend(list(fetch_tasks.keys()))
+        results.extend(teams_results)
+
+        # Add delay between sheet updates
+        await asyncio.sleep(SHEET_UPDATE_DELAY_SECONDS)
+
        # Now fetch values/dupes batch
        fetch_tasks = {
            "values_dupes_batch": fetch_batch_ranges_async(
 
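Note: the processing loop further down treats the "teams_batch" result as a list of valueRange dicts, which is the shape the Sheets API returns in the "valueRanges" field of spreadsheets.values.batchGet. A made-up example of what raw_teams_servers_results is expected to look like (actual row bounds and cell values will differ):

    raw_teams_servers_results_example = [
        {
            "range": "'Our Teams Page'!B6:K1000",   # the API echoes the resolved range back
            "majorDimension": "ROWS",
            "values": [["saq1b", "123456789012345678", "Owner"]],  # trailing empty cells are omitted
        },
        {
            "range": "'Private Server Links'!B6:F1000",
            "majorDimension": "ROWS",
            "values": [["https://discord.gg/example", "saq1b", "Never"]],
        },
    ]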
@@ -625,6 +719,7 @@ async def update_cache_periodically():

        # --- Process Results ---
        raw_scammer_dwc_results = None
+        raw_teams_servers_results = None
        raw_values_dupes_results = None

        for i, result in enumerate(results):
 
@@ -637,6 +732,8 @@ async def update_cache_periodically():
            else:
                if key == "scammer_dwc_batch":
                    raw_scammer_dwc_results = result
+                elif key == "teams_batch":
+                    raw_teams_servers_results = result
                elif key == "values_dupes_batch":
                    raw_values_dupes_results = result
 
@@ -667,6 +764,32 @@ async def update_cache_periodically():
        else:
            logger.warning("Skipping Scammer/DWC processing due to fetch error.")

+        # --- Process Teams/Servers Results ---
+        if raw_teams_servers_results is not None:
+            logger.info(f"Processing {len(raw_teams_servers_results)} valueRanges from Teams/Servers sheet...")
+            for vr in raw_teams_servers_results:
+                range_str = vr.get('range', '')
+                match = re.match(r"^'?([^'!]+)'?!", range_str)
+                if not match:
+                    logger.warning(f"Could not extract sheet name from range '{range_str}' in Teams/Servers response.")
+                    continue
+                sheet_name = match.group(1).replace("''", "'")
+
+                if sheet_name in teams_servers_processor_map:
+                    processor = teams_servers_processor_map[sheet_name]
+                    target_key = teams_servers_target_key_map[sheet_name]
+                    values = vr.get('values', [])
+                    try:
+                        processed_data = processor(values)
+                        new_cache_data[target_key] = processed_data # Store fetched data temporarily
+                        logger.info(f"Processed {len(processed_data)} items for {sheet_name} -> {target_key}")
+                    except Exception as e:
+                        logger.error(f"Error processing data for {sheet_name} using {processor.__name__}: {e}", exc_info=True)
+                        current_errors[f"process_{target_key}"] = str(e)
+                else:
+                    logger.warning(f"No processor found for sheet name '{sheet_name}' derived from range '{range_str}' in Teams/Servers sheet.")
+        else:
+            logger.warning("Skipping Teams/Servers processing due to fetch error.")

        # --- Process Values/Dupes Results ---
        if raw_values_dupes_results is not None:
 
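Note: the sheet-name regex above mirrors how the existing Scammer/DWC processing recovers which sheet a valueRange came from. A standalone check of the pattern against the kind of range string the API returns (the example range itself is made up):

    import re

    range_str = "'Our Teams Page'!B6:K1000"  # quoted because the sheet name contains spaces
    match = re.match(r"^'?([^'!]+)'?!", range_str)
    sheet_name = match.group(1).replace("''", "'")
    print(sheet_name)  # Our Teams Page -> dispatched to process_teams_data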
@@ -702,10 +825,12 @@ async def update_cache_periodically():
        if not current_errors.get("scammer_dwc_batch") and \
           not current_errors.get("process_user_scammers") and \
           not current_errors.get("process_dwc") and \
-           not current_errors.get("process_trusted"): # Check trusted processing too
+           not current_errors.get("process_trusted") and \
+           not current_errors.get("teams_batch") and \
+           not current_errors.get("process_teams"): # Check teams processing too
            logger.info("Fetching Roblox avatars for newly processed data...")
            avatar_tasks = []
-            entries_needing_avatars = new_cache_data.get("user_scammers", []) + new_cache_data.get("dwc", []) + new_cache_data.get("trusted", []) # Include trusted list
+            entries_needing_avatars = new_cache_data.get("user_scammers", []) + new_cache_data.get("dwc", []) + new_cache_data.get("trusted", []) + new_cache_data.get("teams", []) # Include teams list
            for entry in entries_needing_avatars:
                if entry.get('roblox_username'):
                    avatar_tasks.append(fetch_avatar_for_entry_update(session, entry))
 
@@ -713,8 +838,7 @@ async def update_cache_periodically():
                await asyncio.gather(*avatar_tasks) # Exceptions logged within helper
                logger.info(f"Finished fetching avatars for {len(avatar_tasks)} potential new entries.")
        else:
-            logger.warning("Skipping avatar fetching due to errors in fetching/processing scammer/dwc/trusted data.")
-
+            logger.warning("Skipping avatar fetching due to errors in fetching/processing scammer/dwc/trusted/teams data.")

        # --- Change Detection & Webhook Preparation (ONLY if cache is ready) ---
        current_time = datetime.now(timezone.utc)
 
@@ -908,6 +1032,8 @@ async def update_cache_periodically():
            cache["dwc"] = new_cache_data["dwc"]
            cache["trusted"] = new_cache_data["trusted"]
            cache["dupes"] = new_cache_data["dupes"]
+            cache["teams"] = new_cache_data["teams"]
+            cache["servers"] = new_cache_data["servers"]
            cache["value_changes"] = detected_value_changes_for_api # Store the detected changes
            cache["last_updated"] = current_time
            if can_set_ready:
 
@@ -953,6 +1079,20 @@ async def update_cache_periodically():
            else:
                logger.warning("Skipping update for 'user_scammers', 'server_scammers', 'dwc', 'trusted' due to batch fetch error.")

+            # Update teams and servers sections if their batch succeeded AND processing succeeded
+            if "teams_batch" not in current_errors:
+                for key in ["teams", "servers"]:
+                    process_error_key = f"process_{key}"
+                    if process_error_key not in current_errors:
+                        if cache.get(key) != new_cache_data[key]:
+                            cache[key] = new_cache_data[key]
+                            partial_update_details.append(key)
+                            update_occurred = True
+                    else:
+                        logger.warning(f"Skipping update for '{key}' due to processing error.")
+            else:
+                logger.warning("Skipping update for 'teams' and 'servers' due to batch fetch error.")
+
            if update_occurred:
                cache["last_updated"] = current_time # Mark partial update time
                # Mark cache ready only if it was *already* ready and we managed a partial update
 
@@ -1053,6 +1193,8 @@ async def get_status():
            "dwc_entries": len(cache.get("dwc", [])),
            "trusted_entries": len(cache.get("trusted", [])), # Add trusted count
            "duped_usernames": len(cache.get("dupes", [])),
+            "teams": len(cache.get("teams", [])),
+            "servers": len(cache.get("servers", [])),
        },
        "value_change_categories_in_last_cycle": len(cache.get("value_changes", {}))
    }
 
@@ -1118,6 +1260,44 @@ async def get_dupes():
    return {"usernames": cache.get("dupes", [])} # Return empty list if not ready or empty


+@app.get("/api/teams")
+async def get_teams():
+    """Get all teams data from cache"""
+    check_cache_readiness()
+    return {"members": cache.get("teams", [])}
+
+
+@app.get("/api/servers")
+async def get_servers():
+    """Get all private servers data from cache"""
+    check_cache_readiness()
+    return {"servers": cache.get("servers", [])}
+
+
+class TeamMemberLike(BaseModel):
+    discord_user_id: str
+
+
+@app.post("/api/teams/like")
+async def like_team_member(data: TeamMemberLike):
+    """Increment like count for a team member (to be implemented with spreadsheet update)"""
+    check_cache_readiness()
+
+    # Find the team member by discord_user_id
+    discord_user_id = data.discord_user_id.strip()
+    teams = cache.get("teams", [])
+
+    for member in teams:
+        if member.get("discord_user_id") == discord_user_id:
+            # In a real implementation, this would update the spreadsheet
+            # For now, just update the cache
+            member["likes"] = member.get("likes", 0) + 1
+            member["last_updated"] = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
+            return {"success": True, "likes": member["likes"]}
+
+    raise HTTPException(status_code=404, detail=f"Team member with discord_user_id {discord_user_id} not found")
+
+
 class UsernameCheck(BaseModel):
    username: str
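Note: a quick client-side sketch of the three new endpoints, assuming the API is running locally on port 8000 (the base URL and the Discord user ID below are made up; requests is a client-side convenience, not a dependency of main.py):

    import requests

    BASE = "http://localhost:8000"

    members = requests.get(f"{BASE}/api/teams").json()["members"]
    servers = requests.get(f"{BASE}/api/servers").json()["servers"]
    print(f"{len(members)} team members, {len(servers)} private servers cached")

    # Like a team member by Discord user ID; returns 404 if the ID is not in the cached teams list.
    resp = requests.post(f"{BASE}/api/teams/like", json={"discord_user_id": "123456789012345678"})
    print(resp.status_code, resp.json())

As the endpoint's docstring notes, the like only mutates the in-memory cache, so it will likely be overwritten the next time the periodic cache update replaces cache["teams"] from the spreadsheet.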