# --- START OF FILE main.py ---
# main.py
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import Optional, Any, Dict, List
import aiohttp
import os
from datetime import datetime, timezone
import json
import re
from google.oauth2.service_account import Credentials as ServiceAccountCredentials
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from dotenv import load_dotenv
import asyncio
import logging
# --- Logging Setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
app = FastAPI()
# --- Configuration ---
load_dotenv()
# CORS
app.add_middleware(
CORSMiddleware,
allow_origins=["*"], # Consider restricting in production
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Google Sheets Config
# Spreadsheet containing Scammer and DWC info
SCAMMER_DWC_SPREADSHEET_ID = '1sgkhBNGw_r6tBIxvdeXaI0bVmWBeACN4jiw_oDEeXLw'
# Spreadsheet containing Value lists and Dupe list
VALUES_DUPE_SPREADSHEET_ID = '1Toe07o3P517q8sm9Qb1e5xyFWCuwgskj71IKJwJNfNU'
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
# Sheet Names and Ranges within SCAMMER_DWC_SPREADSHEET_ID
USER_SCAMMER_SHEET = "User Scammer Files"
USER_SCAMMER_RANGE = "B6:G"
SERVER_SCAMMER_SHEET = "Server Scammer Files"
SERVER_SCAMMER_RANGE = "B6:F"
DWC_SHEET = "DWC Servers / Users"
DWC_RANGE = "B6:G"
# Sheet Names and Ranges within VALUES_DUPE_SPREADSHEET_ID
DUPE_LIST_SHEET = "Dupe List"
DUPE_LIST_RANGE = "B2:B"
# Value Categories (Sheet Names)
CATEGORIES = [
"Vehicles", "Textures", "Colours", "Spoilers",
"Rims", "Furnitures", "Gun Skins", "Hyperchromes"
]
VALUES_RANGE = 'B6:P' # Range within each category sheet
# Cache Update Interval
CACHE_UPDATE_INTERVAL_SECONDS = 60 * 5 # 5 minutes
# --- Global Cache ---
cache = {
"values": {}, # Dict mapping category name to list of items
"value_changes": {}, # Dict mapping category name to list of changes
"user_scammers": [],
"server_scammers": [],
"dwc": [],
"dupes": [], # List of duped usernames
"last_updated": None, # Timestamp of the last successful/partial update
"is_ready": False, # Is the cache populated at least once?
"service_available": True # Is the Google Sheets service reachable?
}
# --- Google Sheets Initialization ---
sheets_service = None # Initialize as None
def quote_sheet_name(name: str) -> str:
"""Adds single quotes around a sheet name if it needs them."""
if not name:
return "''"
# Simple check: if it contains spaces or non-alphanumeric chars (excluding _)
if not re.match(r"^[a-zA-Z0-9_]+$", name):
# Escape existing single quotes within the name
escaped_name = name.replace("'", "''")
return f"'{escaped_name}'"
return name
def init_google_sheets(scopes=SCOPES):
"""Initialize Google Sheets credentials from environment variable"""
global sheets_service, cache
try:
creds_json_str = os.getenv('CREDENTIALS_JSON')
if not creds_json_str:
logger.error("CREDENTIALS_JSON environment variable not found")
raise ValueError("CREDENTIALS_JSON environment variable not found")
creds_json = json.loads(creds_json_str)
creds = ServiceAccountCredentials.from_service_account_info(
creds_json,
scopes=scopes
)
sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False) # Disable discovery cache
logger.info("Google Sheets service initialized successfully from ENV VAR.")
cache["service_available"] = True
return sheets_service
except Exception as e:
logger.error(f"Error initializing Google Sheets from ENV VAR: {e}")
# Fallback attempt
try:
logger.info("Falling back to loading credentials from file 'credentials.json'")
creds = ServiceAccountCredentials.from_service_account_file(
'credentials.json',
scopes=scopes
)
sheets_service = build('sheets', 'v4', credentials=creds, cache_discovery=False)
logger.info("Google Sheets service initialized successfully from file.")
cache["service_available"] = True
return sheets_service
except Exception as file_e:
logger.error(f"Error loading credentials from file: {file_e}")
logger.critical("Google Sheets service could not be initialized. API will be limited.")
cache["service_available"] = False
sheets_service = None
return None
# Initialize on module load
init_google_sheets()
# --- Helper Functions (Data Extraction & Formatting) ---
def extract_drive_id(url):
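    """Extract the file ID from a Google Drive '/file/d/<id>' URL, or return None."""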
if not url or not isinstance(url, str): return None
match = re.search(r'https://drive\.google\.com/file/d/([^/]+)', url)
return match.group(1) if match else None
def convert_to_thumbnail_url(drive_url):
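    """Rewrite a Google Drive file URL as a thumbnail URL; fall back to the original URL."""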
drive_id = extract_drive_id(drive_url)
return f"https://drive.google.com/thumbnail?id={drive_id}&sz=w1000" if drive_id else drive_url
def extract_image_url(formula, drive_url=None):
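    """Resolve an image URL from a Drive link, a direct http(s) URL, or an =IMAGE() formula; '' if none found."""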
# Priority to explicit drive_url if provided
if drive_url and isinstance(drive_url, str) and 'drive.google.com' in drive_url:
return convert_to_thumbnail_url(drive_url)
if not formula or not isinstance(formula, str): return ''
# Handle direct URLs
if formula.startswith('http://') or formula.startswith('https://'):
return formula
# Handle =IMAGE("...") formula
if formula.startswith('=IMAGE('):
match = re.search(r'=IMAGE\("([^"]+)"', formula)
if match: return match.group(1)
    # Not a recognizable direct URL or IMAGE() formula, and no usable drive_url:
    # treat the cell as having no image source.
    return ''
def format_currency(value: Any) -> Optional[str]:
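    """Format a numeric value as whole-dollar currency (e.g. '$1,234'); pass through non-numeric text; otherwise 'N/A'."""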
if value is None or str(value).strip() == '': return 'N/A'
try:
num_str = str(value).replace('$', '').replace(',', '').strip()
if not num_str or num_str.lower() == 'n/a': return 'N/A'
num = float(num_str)
return f"${num:,.0f}"
except (ValueError, TypeError):
if isinstance(value, str) and not re.match(r'^-?[\d,.$]+\$?$', value.strip()):
return value.strip() # Return original text if non-numeric-like
return 'N/A'
def parse_cached_currency(value_str: Optional[str]) -> Optional[float]:
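    """Parse a cached currency string such as '$1,234' back into a float, or return None."""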
    if value_str is None or str(value_str).strip().lower() == 'n/a':
return None
try:
num_str = str(value_str).replace('$', '').replace(',', '').strip()
return float(num_str)
except (ValueError, TypeError):
return None
def clean_string(value, default='N/A'):
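    """Return the stripped string form of a value, or the default if empty/None."""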
if value is None: return default
cleaned = str(value).strip()
return cleaned if cleaned else default
def clean_string_optional(value):
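    """Return the stripped string form of a value, treating blanks and '-' as None."""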
if value is None: return None
cleaned = str(value).strip()
return cleaned if cleaned and cleaned != '-' else None
def parse_alt_accounts(value):
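    """Split a comma-separated alt-accounts cell into a list of usernames."""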
if value is None: return []
raw_string = str(value).strip()
if not raw_string or raw_string == '-': return []
return [acc.strip() for acc in raw_string.split(',') if acc.strip()]
# --- Roblox API Helpers (Unchanged) ---
async def get_roblox_user_id(session: aiohttp.ClientSession, username: str):
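    """Look up a Roblox user ID by username via the Roblox Users API; None on failure."""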
if not username: return None
url = "https://users.roblox.com/v1/usernames/users"
payload = {"usernames": [username], "excludeBannedUsers": False}
try:
async with session.post(url, json=payload) as response:
if response.status == 200:
data = await response.json()
if data and data.get("data") and len(data["data"]) > 0:
return data["data"][0].get("id")
return None
except asyncio.TimeoutError:
logger.warning(f"Timeout fetching Roblox User ID for {username}")
return None
except aiohttp.ClientError as e:
logger.warning(f"Network error fetching Roblox User ID for {username}: {e}")
return None
except Exception as e:
logger.error(f"Unexpected exception fetching Roblox User ID for {username}: {e}")
return None
async def get_roblox_avatar_url(session: aiohttp.ClientSession, user_id: int):
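    """Fetch the 150x150 avatar headshot URL for a Roblox user ID; None on failure."""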
if not user_id: return None
url = f"https://thumbnails.roblox.com/v1/users/avatar-headshot?userIds={user_id}&size=150x150&format=Png&isCircular=false"
try:
async with session.get(url) as response:
if response.status == 200:
data = await response.json()
if data and data.get("data") and len(data["data"]) > 0:
return data["data"][0].get("imageUrl")
return None
except asyncio.TimeoutError:
logger.warning(f"Timeout fetching Roblox avatar for User ID {user_id}")
return None
except aiohttp.ClientError as e:
logger.warning(f"Network error fetching Roblox avatar for User ID {user_id}: {e}")
return None
except Exception as e:
logger.error(f"Unexpected exception fetching Roblox avatar for User ID {user_id}: {e}")
return None
# --- Data Processing Functions ---
# These functions take raw rows from the sheet and process them.
# They are now independent of *which* sheet they came from, as long as the structure matches.
def process_sheet_data(values): # For Value Categories
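    """Turn raw value-category rows (range B6:P) into item dicts for the cache."""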
if not values: return []
processed_data = []
for row in values: # Expected range like B6:P
if not row or not any(str(cell).strip() for cell in row if cell is not None): continue
# Indices based on B6:P (0-indexed from B)
# B=0, C=1, D=2, E=3, F=4, G=5, H=6, I=7, J=8, K=9, L=10, M=11, N=12, O=13, P=14
icon_formula = row[0] if len(row) > 0 else ''
name = row[2] if len(row) > 2 else 'N/A'
value_raw = row[4] if len(row) > 4 else 'N/A'
duped_value_raw = row[6] if len(row) > 6 else 'N/A'
market_value_raw = row[8] if len(row) > 8 else 'N/A'
demand = row[10] if len(row) > 10 else 'N/A'
notes = row[12] if len(row) > 12 else ''
drive_url = row[14] if len(row) > 14 else None # Column P
# Skip header-like rows (e.g., "LEVEL 1 | HYPERCHROMES" in column F/index 4)
if len(row) > 4 and isinstance(row[4], str) and re.search(r'LEVEL \d+ \|', row[4]):
continue
if clean_string(name) == 'N/A':
continue
processed_item = {
'icon': extract_image_url(icon_formula, drive_url),
'name': clean_string(name, 'N/A'),
'value': format_currency(value_raw),
'dupedValue': format_currency(duped_value_raw),
'marketValue': format_currency(market_value_raw),
'demand': clean_string(demand, 'N/A'),
'notes': clean_string(notes, '')
}
processed_data.append(processed_item)
return processed_data
def process_user_scammer_data(values): # For User Scammer Sheet
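    """Turn raw user-scammer rows (range B6:G) into entry dicts for the cache."""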
if not values: return []
processed_data = []
for row in values: # Expected range like B6:G
if not row or len(row) < 2: continue
# Indices based on B6:G (0-indexed from B)
# B=0, C=1, D=2, E=3, F=4, G=5
discord_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
roblox_username = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
if not discord_id and not roblox_username: continue
processed_item = {
'discord_id': discord_id,
'roblox_username': roblox_username,
'scam_type': clean_string(row[2]) if len(row) > 2 else 'N/A', # Col D
'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
'roblox_avatar_url': None
}
processed_data.append(processed_item)
return processed_data
def process_server_scammer_data(values): # For Server Scammer Sheet
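    """Turn raw server-scammer rows (range B6:F) into entry dicts for the cache."""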
if not values: return []
processed_data = []
for row in values: # Expected range like B6:F
if not row or len(row) < 2: continue
# Indices based on B6:F (0-indexed from B)
# B=0, C=1, D=2, E=3, F=4
server_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
server_name = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
if not server_id and not server_name: continue
processed_item = {
'server_id': server_id,
'server_name': server_name,
'scam_type': clean_string(row[2]) if len(row) > 2 else 'N/A', # Col D
'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None # Col F
}
processed_data.append(processed_item)
return processed_data
def process_dwc_data(values): # For DWC Sheet
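    """Turn raw DWC rows (range B6:G) into entry dicts for the cache."""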
if not values: return []
processed_data = []
for row in values: # Expected range like B6:G
if not row or len(row) < 3: continue
# Indices based on B6:G (0-indexed from B)
# B=0, C=1, D=2, E=3, F=4, G=5
user_id = clean_string_optional(row[0]) if len(row) > 0 else None # Col B
server_id = clean_string_optional(row[1]) if len(row) > 1 else None # Col C
roblox_user = clean_string_optional(row[2]) if len(row) > 2 else None # Col D
if not user_id and not server_id and not roblox_user: continue
processed_item = {
'status': 'DWC',
'discord_user_id': user_id,
'discord_server_id': server_id,
'roblox_username': roblox_user,
'explanation': clean_string(row[3]) if len(row) > 3 else 'N/A', # Col E
'evidence_link': clean_string_optional(row[4]) if len(row) > 4 else None, # Col F
'alt_accounts': parse_alt_accounts(row[5]) if len(row) > 5 else [], # Col G
'roblox_avatar_url': None
}
processed_data.append(processed_item)
return processed_data
def process_dupe_list_data(values): # For Dupe List Sheet
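    """Flatten the dupe-list column (range B2:B) into lowercase usernames."""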
if not values: return []
# Expected range like B2:B
    return [
        row[0].strip().lower()
        for row in values
        if row and row[0] and isinstance(row[0], str) and row[0].strip()
    ]
# --- Async Fetching Functions ---
async def fetch_batch_ranges_async(spreadsheet_id: str, ranges: List[str], value_render_option: str = 'FORMATTED_VALUE') -> List[Dict]:
"""Async wrapper to fetch multiple ranges using batchGet and return raw valueRanges."""
global sheets_service
if not sheets_service:
logger.warning(f"Attempted batch fetch from {spreadsheet_id} but Sheets service is unavailable.")
raise Exception("Google Sheets service not initialized")
if not ranges:
logger.warning(f"Batch fetch called with empty ranges for {spreadsheet_id}.")
return []
try:
logger.info(f"Fetching batch ranges from {spreadsheet_id}: {ranges}")
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(
None,
lambda: sheets_service.spreadsheets().values().batchGet(
spreadsheetId=spreadsheet_id,
ranges=ranges,
valueRenderOption=value_render_option,
majorDimension='ROWS'
).execute()
)
value_ranges = result.get('valueRanges', [])
logger.info(f"Successfully fetched batch data for {len(value_ranges)} ranges from {spreadsheet_id}.")
return value_ranges # Return the raw list of valueRange objects
except HttpError as e:
        # e.content may not always be valid JSON; fall back to an empty error dict.
        try:
            error_details = json.loads(e.content).get('error', {})
        except (ValueError, TypeError):
            error_details = {}
        status = error_details.get('status')
        message = error_details.get('message')
logger.error(f"Google API HTTP Error during batch fetch for {spreadsheet_id}: Status={status}, Message={message}")
raise e
except Exception as e:
logger.error(f"Error during batch fetching from {spreadsheet_id} for ranges {ranges}: {e}")
raise e
# --- Background Cache Update Task (Refactored for Batching per Spreadsheet) ---
async def update_cache_periodically():
"""Fetches data using batchGet per spreadsheet, processes, detects changes, and updates cache."""
global cache
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=25)) as session:  # 25s total timeout for outbound HTTP requests
while True:
if not cache["service_available"]:
logger.info("Attempting to re-initialize Google Sheets service...")
init_google_sheets()
if not cache["service_available"]:
logger.warning("Google Sheets service still unavailable, skipping cache update cycle.")
await asyncio.sleep(CACHE_UPDATE_INTERVAL_SECONDS * 2)
continue
else:
logger.info("Google Sheets service re-initialized. Proceeding with cache update.")
logger.info("Starting cache update cycle...")
start_time = datetime.now(timezone.utc)
# Prepare temporary storage for fetched data
fetched_values_categories = {} # { "CategoryName": [items...] }
new_cache_data = {
"user_scammers": [],
"server_scammers": [],
"dwc": [],
"dupes": [],
}
current_errors = {} # Track errors for specific fetches/sheets
try:
# --- Define Ranges and Processors ---
# Scammer/DWC Spreadsheet
scammer_dwc_ranges = [
f"{quote_sheet_name(USER_SCAMMER_SHEET)}!{USER_SCAMMER_RANGE}",
f"{quote_sheet_name(SERVER_SCAMMER_SHEET)}!{SERVER_SCAMMER_RANGE}",
f"{quote_sheet_name(DWC_SHEET)}!{DWC_RANGE}",
]
scammer_dwc_processor_map = {
USER_SCAMMER_SHEET: process_user_scammer_data,
SERVER_SCAMMER_SHEET: process_server_scammer_data,
DWC_SHEET: process_dwc_data,
}
scammer_dwc_target_key_map = { # Map sheet name to cache key
USER_SCAMMER_SHEET: "user_scammers",
SERVER_SCAMMER_SHEET: "server_scammers",
DWC_SHEET: "dwc",
}
# Values/Dupes Spreadsheet
values_dupes_ranges = [f"{quote_sheet_name(DUPE_LIST_SHEET)}!{DUPE_LIST_RANGE}"]
values_dupes_ranges.extend([f"{quote_sheet_name(cat)}!{VALUES_RANGE}" for cat in CATEGORIES])
# --- Define Fetch Tasks ---
fetch_tasks = {
"scammer_dwc_batch": fetch_batch_ranges_async(
SCAMMER_DWC_SPREADSHEET_ID,
scammer_dwc_ranges,
value_render_option='FORMATTED_VALUE' # These don't need formulas
),
"values_dupes_batch": fetch_batch_ranges_async(
VALUES_DUPE_SPREADSHEET_ID,
values_dupes_ranges,
value_render_option='FORMULA' # Need formula for IMAGE() in values
)
}
# --- Execute Tasks Concurrently ---
results = await asyncio.gather(*fetch_tasks.values(), return_exceptions=True)
task_keys = list(fetch_tasks.keys())
# --- Process Results ---
raw_scammer_dwc_results = None
raw_values_dupes_results = None
for i, result in enumerate(results):
key = task_keys[i]
if isinstance(result, Exception):
logger.error(f"Failed to fetch batch data for {key}: {result}")
current_errors[key] = str(result)
else:
# Store the raw valueRanges list
if key == "scammer_dwc_batch":
raw_scammer_dwc_results = result
elif key == "values_dupes_batch":
raw_values_dupes_results = result
# --- Process Scammer/DWC Results ---
if raw_scammer_dwc_results is not None:
logger.info(f"Processing {len(raw_scammer_dwc_results)} valueRanges from Scammer/DWC sheet...")
for vr in raw_scammer_dwc_results:
range_str = vr.get('range', '')
# Extract sheet name (handle quotes)
match = re.match(r"^'?([^'!]+)'?!", range_str)
if not match:
logger.warning(f"Could not extract sheet name from range '{range_str}' in Scammer/DWC response.")
continue
sheet_name = match.group(1).replace("''", "'") # Unescape quotes
if sheet_name in scammer_dwc_processor_map:
processor = scammer_dwc_processor_map[sheet_name]
target_key = scammer_dwc_target_key_map[sheet_name]
values = vr.get('values', [])
try:
processed_data = processor(values)
new_cache_data[target_key] = processed_data
logger.info(f"Processed {len(processed_data)} items for {sheet_name} -> {target_key}")
except Exception as e:
logger.error(f"Error processing data for {sheet_name} using {processor.__name__}: {e}", exc_info=True)
current_errors[f"process_{target_key}"] = str(e)
else:
logger.warning(f"No processor found for sheet name '{sheet_name}' derived from range '{range_str}' in Scammer/DWC sheet.")
# --- Process Values/Dupes Results ---
if raw_values_dupes_results is not None:
logger.info(f"Processing {len(raw_values_dupes_results)} valueRanges from Values/Dupes sheet...")
for vr in raw_values_dupes_results:
range_str = vr.get('range', '')
match = re.match(r"^'?([^'!]+)'?!", range_str)
if not match:
logger.warning(f"Could not extract sheet name from range '{range_str}' in Values/Dupes response.")
continue
sheet_name = match.group(1).replace("''", "'")
values = vr.get('values', [])
try:
if sheet_name == DUPE_LIST_SHEET:
processed_data = process_dupe_list_data(values)
new_cache_data["dupes"] = processed_data
logger.info(f"Processed {len(processed_data)} items for {DUPE_LIST_SHEET} -> dupes")
elif sheet_name in CATEGORIES:
processed_data = process_sheet_data(values)
fetched_values_categories[sheet_name] = processed_data
logger.info(f"Processed {len(processed_data)} items for Category: {sheet_name}")
else:
logger.warning(f"Unrecognized sheet name '{sheet_name}' derived from range '{range_str}' in Values/Dupes sheet.")
except Exception as e:
target_key = "dupes" if sheet_name == DUPE_LIST_SHEET else f"values_{sheet_name}"
logger.error(f"Error processing data for {sheet_name}: {e}", exc_info=True)
current_errors[f"process_{target_key}"] = str(e)
# --- Detect Value Changes ---
logger.info("Comparing fetched values with cached values...")
current_time = datetime.now(timezone.utc)
detected_value_changes = {}
fields_to_compare = ['value', 'dupedValue', 'marketValue']
if "values" not in cache: cache["values"] = {} # Ensure exists
for category, new_items in fetched_values_categories.items():
old_items_dict = {item['name']: item for item in cache["values"].get(category, [])}
category_changes = []
for new_item in new_items:
item_name = new_item.get('name')
if not item_name or item_name == 'N/A': continue
old_item = old_items_dict.get(item_name)
if old_item: # Check existing item for changes
for field in fields_to_compare:
old_val_str = old_item.get(field, 'N/A')
new_val_str = new_item.get(field, 'N/A')
                                old_parsed = parse_cached_currency(old_val_str)
                                new_parsed = parse_cached_currency(new_val_str)
                                old_norm = old_parsed if old_parsed is not None else old_val_str
                                new_norm = new_parsed if new_parsed is not None else new_val_str
if old_norm != new_norm:
logger.info(f"Change detected in {category}: {item_name} - {field}: '{old_val_str}' -> '{new_val_str}'")
category_changes.append({
"item_name": item_name, "field": field,
"old_value": old_val_str if old_val_str is not None else "N/A",
"new_value": new_val_str if new_val_str is not None else "N/A",
"timestamp": current_time.isoformat()
})
if category_changes:
detected_value_changes[category] = category_changes
# --- Fetch Roblox Avatars ---
logger.info("Fetching Roblox avatars...")
avatar_tasks = []
# Combine lists needing avatars (only user scammers and DWC have roblox usernames)
entries_needing_avatars = new_cache_data.get("user_scammers", []) + new_cache_data.get("dwc", [])
for entry in entries_needing_avatars:
if entry.get('roblox_username'):
# Pass the specific entry dict to the update function
avatar_tasks.append(fetch_avatar_for_entry_update(session, entry))
if avatar_tasks:
await asyncio.gather(*avatar_tasks) # Exceptions logged within helper
logger.info(f"Finished fetching avatars for {len(avatar_tasks)} potential entries.")
# --- Final Cache Update ---
update_occurred = False
if not current_errors: # Perfect cycle
logger.info("Updating full cache (no errors during fetch or processing).")
cache["values"] = fetched_values_categories
cache["user_scammers"] = new_cache_data["user_scammers"]
cache["server_scammers"] = new_cache_data["server_scammers"]
cache["dwc"] = new_cache_data["dwc"]
cache["dupes"] = new_cache_data["dupes"]
cache["value_changes"] = detected_value_changes
cache["last_updated"] = current_time
cache["is_ready"] = True
update_occurred = True
logger.info(f"Cache update cycle completed successfully.")
else: # Errors occurred, attempt partial update
logger.warning(f"Cache update cycle completed with errors: {current_errors}. Attempting partial update.")
partial_update_details = []
# Update values only if the values/dupes batch succeeded AND processing succeeded
if "values_dupes_batch" not in current_errors and not any(k.startswith("process_values_") for k in current_errors):
if cache["values"] != fetched_values_categories:
cache["values"] = fetched_values_categories
cache["value_changes"] = detected_value_changes # Update changes along with values
partial_update_details.append("values")
update_occurred = True
# Update dupes only if the values/dupes batch succeeded AND processing succeeded
if "values_dupes_batch" not in current_errors and "process_dupes" not in current_errors:
if cache["dupes"] != new_cache_data["dupes"]:
cache["dupes"] = new_cache_data["dupes"]
partial_update_details.append("dupes")
update_occurred = True
# Update scammer/DWC sections if their batch succeeded AND processing succeeded
if "scammer_dwc_batch" not in current_errors:
for key in ["user_scammers", "server_scammers", "dwc"]:
process_error_key = f"process_{key}"
if process_error_key not in current_errors:
if cache[key] != new_cache_data[key]:
cache[key] = new_cache_data[key]
partial_update_details.append(key)
update_occurred = True
if update_occurred:
cache["last_updated"] = current_time # Mark partial update time
cache["is_ready"] = True # Allow access even if partial
logger.info(f"Partially updated cache sections: {', '.join(partial_update_details)}")
else:
logger.error(f"Cache update cycle failed, and no parts could be updated based on errors. Errors: {current_errors}")
# Keep cache["is_ready"] as it was.
except Exception as e:
logger.exception(f"Critical error during cache update cycle: {e}")
if isinstance(e, (aiohttp.ClientError, HttpError, asyncio.TimeoutError)):
logger.warning("Communication error detected, will re-check service availability next cycle.")
# --- Wait for the next cycle ---
end_time = datetime.now(timezone.utc)
duration = (end_time - start_time).total_seconds()
wait_time = max(10, CACHE_UPDATE_INTERVAL_SECONDS - duration)
logger.info(f"Cache update cycle duration: {duration:.2f}s. Waiting {wait_time:.2f}s for next cycle.")
await asyncio.sleep(wait_time)
async def fetch_avatar_for_entry_update(session: aiohttp.ClientSession, entry: dict):
"""Fetches avatar and updates the provided entry dictionary IN PLACE."""
roblox_username = entry.get('roblox_username')
if not roblox_username: return
current_avatar = entry.get('roblox_avatar_url')
new_avatar = None # Default to None
try:
user_id = await get_roblox_user_id(session, roblox_username)
if user_id:
new_avatar = await get_roblox_avatar_url(session, user_id)
except Exception as e:
# Log errors but don't stop the main update loop
logger.warning(f"Failed to fetch avatar for {roblox_username}: {e}")
# Keep new_avatar as None on error
finally:
# Update the dict only if the value has actually changed
if current_avatar != new_avatar:
entry['roblox_avatar_url'] = new_avatar
# --- FastAPI Startup Event ---
@app.on_event("startup")
async def startup_event():
"""Starts the background cache update task."""
if not cache["service_available"]:
logger.warning("Google Sheets service not available at startup. Will attempt re-init in background task.")
logger.info("Starting background cache update task...")
asyncio.create_task(update_cache_periodically())
# --- API Endpoints (Largely unchanged, rely on cache state) ---
def check_cache_readiness():
"""Reusable check for API endpoints - Checks cache readiness"""
if not cache["is_ready"]:
raise HTTPException(status_code=503, detail="Cache is initializing or data is currently unavailable. Please try again shortly.")
@app.get("/")
async def root():
return {"message": "JB Vanta API - Running"}
@app.get("/api/status")
async def get_status():
"""Returns the current status of the cache and service availability"""
return {
"cache_ready": cache["is_ready"],
"sheets_service_available": cache["service_available"],
"last_updated": cache["last_updated"].isoformat() if cache["last_updated"] else None,
"cached_items": {
"value_categories": len(cache["values"]),
"user_scammers": len(cache["user_scammers"]),
"server_scammers": len(cache["server_scammers"]),
"dwc_entries": len(cache["dwc"]),
"duped_usernames": len(cache["dupes"]),
},
"value_change_categories": len(cache.get("value_changes", {}))
}
@app.get("/api/values")
async def get_values():
"""Get all values data from cache"""
check_cache_readiness()
return cache["values"]
@app.get("/api/values/{category}")
async def get_category_values(category: str):
"""Get values data for a specific category from cache"""
check_cache_readiness()
matched_category = next((c for c in CATEGORIES if c.lower() == category.lower()), None)
if not matched_category:
raise HTTPException(status_code=404, detail=f"Category '{category}' not found.")
return {matched_category: cache["values"].get(matched_category, [])}
@app.get("/api/value-changes/{category}")
async def get_category_value_changes(category: str):
"""Get detected value changes for a specific category."""
check_cache_readiness()
matched_category = next((c for c in CATEGORIES if c.lower() == category.lower()), None)
if not matched_category:
raise HTTPException(status_code=404, detail=f"Category '{category}' not found.")
return {matched_category: cache.get("value_changes", {}).get(matched_category, [])}
@app.get("/api/value-changes")
async def get_all_value_changes():
"""Get all detected value changes from the last cycle."""
check_cache_readiness()
return cache.get("value_changes", {})
@app.get("/api/scammers")
async def get_scammers():
"""Get all scammer and DWC data (users, servers, dwc) from cache"""
check_cache_readiness()
return {
"users": cache["user_scammers"],
"servers": cache["server_scammers"],
"dwc": cache["dwc"]
}
@app.get("/api/dupes")
async def get_dupes():
"""Get all duped usernames from cache"""
check_cache_readiness()
# Handle case where dupes might be None temporarily during init failure
return {"usernames": cache.get("dupes") or []}
class UsernameCheck(BaseModel):
username: str
@app.post("/api/check")
async def check_username(data: UsernameCheck):
"""Check if a username is duped using cached data and send webhook"""
check_cache_readiness() # Use the standard readiness check
username_to_check = data.username.strip().lower()
is_duped = username_to_check in (cache.get("dupes") or [])
# Webhook notification (runs in background)
if not is_duped:
webhook_url = os.getenv("WEBHOOK_URL")
if webhook_url:
async def send_webhook_notification():
try:
async with aiohttp.ClientSession() as session:
webhook_data = {
"content": None,
"embeds": [{
"title": "New Dupe Check - Not Found",
"description": f"Username `{data.username}` was checked but not found in the dupe database.",
"color": 16776960, # Yellow
"timestamp": datetime.now(timezone.utc).isoformat()
}]
}
async with session.post(webhook_url, json=webhook_data) as response:
if response.status not in [200, 204]:
logger.warning(f"Failed to send webhook (Status: {response.status}): {await response.text()}")
except Exception as e:
logger.error(f"Error sending webhook: {e}")
asyncio.create_task(send_webhook_notification())
else:
logger.info("Webhook URL not configured. Skipping notification.")
return {"username": data.username, "is_duped": is_duped}
@app.get("/health")
def health_check():
"""Provides a health status of the API and its cache."""
if not cache["is_ready"]:
return {"status": "initializing"}
if not cache["service_available"]:
return {"status": "degraded", "reason": "Sheets service connection issue"}
if cache["last_updated"] and (datetime.now(timezone.utc) - cache["last_updated"]).total_seconds() > CACHE_UPDATE_INTERVAL_SECONDS * 3:
return {"status": "degraded", "reason": "Cache potentially stale (last update > 3 intervals ago)"}
return {"status": "ok"}
# --- END OF FILE main.py ---