tfrere committed on
Commit
5c2e213
·
1 Parent(s): 63b2074

fix votes and update submit limitation

Browse files
backend/app/api/endpoints/votes.py CHANGED
@@ -1,14 +1,17 @@
1
- from fastapi import APIRouter, HTTPException, Query, Depends
2
  from typing import Dict, Any, List
3
  from app.services.votes import VoteService
4
  from app.core.fastapi_cache import cached, build_cache_key, invalidate_cache_key
5
  import logging
6
  from app.core.formatting import LogFormatter
 
7
 
8
  logger = logging.getLogger(__name__)
9
  router = APIRouter()
10
  vote_service = VoteService()
11
 
 
 
12
  def model_votes_key_builder(func, namespace: str = "model_votes", **kwargs):
13
  """Build cache key for model votes"""
14
  provider = kwargs.get('provider')
@@ -26,22 +29,25 @@ def user_votes_key_builder(func, namespace: str = "user_votes", **kwargs):
26
 
27
  @router.post("/{model_id:path}")
28
  async def add_vote(
 
29
  model_id: str,
30
  vote_type: str = Query(..., description="Type of vote (up/down)"),
31
- user_id: str = Query(..., description="HuggingFace username")
 
32
  ) -> Dict[str, Any]:
33
  try:
34
  logger.info(LogFormatter.section("ADDING VOTE"))
35
  stats = {
36
  "Model": model_id,
37
  "User": user_id,
38
- "Type": vote_type
 
39
  }
40
  for line in LogFormatter.tree(stats, "Vote Details"):
41
  logger.info(line)
42
 
43
  await vote_service.initialize()
44
- result = await vote_service.add_vote(model_id, user_id, vote_type)
45
 
46
  # Invalidate affected caches
47
  try:
@@ -52,8 +58,8 @@ async def add_vote(
52
  model_cache_key = build_cache_key("model_votes", provider, model)
53
  user_cache_key = build_cache_key("user_votes", user_id)
54
 
55
- invalidate_cache_key(model_cache_key)
56
- invalidate_cache_key(user_cache_key)
57
 
58
  cache_stats = {
59
  "Model_Cache": model_cache_key,
@@ -65,14 +71,18 @@ async def add_vote(
65
  except Exception as e:
66
  logger.error(LogFormatter.error("Failed to invalidate cache", e))
67
 
 
 
 
68
  return result
69
  except Exception as e:
70
  logger.error(LogFormatter.error("Failed to add vote", e))
71
  raise HTTPException(status_code=400, detail=str(e))
72
 
73
  @router.get("/model/{provider}/{model}")
74
- @cached(expire=60, key_builder=model_votes_key_builder)
75
  async def get_model_votes(
 
76
  provider: str,
77
  model: str
78
  ) -> Dict[str, Any]:
@@ -82,6 +92,11 @@ async def get_model_votes(
82
  await vote_service.initialize()
83
  model_id = f"{provider}/{model}"
84
  result = await vote_service.get_model_votes(model_id)
 
 
 
 
 
85
  logger.info(LogFormatter.success(f"Found {result.get('total_votes', 0)} votes"))
86
  return result
87
  except Exception as e:
@@ -89,8 +104,9 @@ async def get_model_votes(
89
  raise HTTPException(status_code=400, detail=str(e))
90
 
91
  @router.get("/user/{user_id}")
92
- @cached(expire=60, key_builder=user_votes_key_builder)
93
  async def get_user_votes(
 
94
  user_id: str
95
  ) -> List[Dict[str, Any]]:
96
  """Get all votes from a specific user"""
@@ -98,6 +114,11 @@ async def get_user_votes(
98
  logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
99
  await vote_service.initialize()
100
  votes = await vote_service.get_user_votes(user_id)
 
 
 
 
 
101
  logger.info(LogFormatter.success(f"Found {len(votes)} votes"))
102
  return votes
103
  except Exception as e:
 
1
+ from fastapi import APIRouter, HTTPException, Query, Depends, Response
2
  from typing import Dict, Any, List
3
  from app.services.votes import VoteService
4
  from app.core.fastapi_cache import cached, build_cache_key, invalidate_cache_key
5
  import logging
6
  from app.core.formatting import LogFormatter
7
+ from datetime import datetime, timezone
8
 
9
  logger = logging.getLogger(__name__)
10
  router = APIRouter()
11
  vote_service = VoteService()
12
 
13
+ CACHE_TTL = 30 # 30 seconds cache
14
+
15
  def model_votes_key_builder(func, namespace: str = "model_votes", **kwargs):
16
  """Build cache key for model votes"""
17
  provider = kwargs.get('provider')
 
29
 
30
  @router.post("/{model_id:path}")
31
  async def add_vote(
32
+ response: Response,
33
  model_id: str,
34
  vote_type: str = Query(..., description="Type of vote (up/down)"),
35
+ user_id: str = Query(..., description="HuggingFace username"),
36
+ vote_data: Dict[str, Any] = None
37
  ) -> Dict[str, Any]:
38
  try:
39
  logger.info(LogFormatter.section("ADDING VOTE"))
40
  stats = {
41
  "Model": model_id,
42
  "User": user_id,
43
+ "Type": vote_type,
44
+ "Config": vote_data or {}
45
  }
46
  for line in LogFormatter.tree(stats, "Vote Details"):
47
  logger.info(line)
48
 
49
  await vote_service.initialize()
50
+ result = await vote_service.add_vote(model_id, user_id, vote_type, vote_data)
51
 
52
  # Invalidate affected caches
53
  try:
 
58
  model_cache_key = build_cache_key("model_votes", provider, model)
59
  user_cache_key = build_cache_key("user_votes", user_id)
60
 
61
+ await invalidate_cache_key(model_cache_key)
62
+ await invalidate_cache_key(user_cache_key)
63
 
64
  cache_stats = {
65
  "Model_Cache": model_cache_key,
 
71
  except Exception as e:
72
  logger.error(LogFormatter.error("Failed to invalidate cache", e))
73
 
74
+ # Add cache control headers
75
+ response.headers["Cache-Control"] = "no-cache"
76
+
77
  return result
78
  except Exception as e:
79
  logger.error(LogFormatter.error("Failed to add vote", e))
80
  raise HTTPException(status_code=400, detail=str(e))
81
 
82
  @router.get("/model/{provider}/{model}")
83
+ @cached(expire=CACHE_TTL, key_builder=model_votes_key_builder)
84
  async def get_model_votes(
85
+ response: Response,
86
  provider: str,
87
  model: str
88
  ) -> Dict[str, Any]:
 
92
  await vote_service.initialize()
93
  model_id = f"{provider}/{model}"
94
  result = await vote_service.get_model_votes(model_id)
95
+
96
+ # Add cache control headers
97
+ response.headers["Cache-Control"] = f"max-age={CACHE_TTL}"
98
+ response.headers["Last-Modified"] = vote_service._last_sync.strftime("%a, %d %b %Y %H:%M:%S GMT")
99
+
100
  logger.info(LogFormatter.success(f"Found {result.get('total_votes', 0)} votes"))
101
  return result
102
  except Exception as e:
 
104
  raise HTTPException(status_code=400, detail=str(e))
105
 
106
  @router.get("/user/{user_id}")
107
+ @cached(expire=CACHE_TTL, key_builder=user_votes_key_builder)
108
  async def get_user_votes(
109
+ response: Response,
110
  user_id: str
111
  ) -> List[Dict[str, Any]]:
112
  """Get all votes from a specific user"""
 
114
  logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
115
  await vote_service.initialize()
116
  votes = await vote_service.get_user_votes(user_id)
117
+
118
+ # Add cache control headers
119
+ response.headers["Cache-Control"] = f"max-age={CACHE_TTL}"
120
+ response.headers["Last-Modified"] = vote_service._last_sync.strftime("%a, %d %b %Y %H:%M:%S GMT")
121
+
122
  logger.info(LogFormatter.success(f"Found {len(votes)} votes"))
123
  return votes
124
  except Exception as e:
backend/app/core/fastapi_cache.py CHANGED
@@ -5,35 +5,63 @@ from datetime import timedelta
5
  from app.config import CACHE_TTL
6
  import logging
7
  from app.core.formatting import LogFormatter
 
8
 
9
  logger = logging.getLogger(__name__)
10
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  def setup_cache():
12
  """Initialize FastAPI Cache with in-memory backend"""
13
- FastAPICache.init(
14
- backend=InMemoryBackend(),
15
- prefix="fastapi-cache",
16
- expire=CACHE_TTL
17
- )
18
- logger.info(LogFormatter.success("FastAPI Cache initialized with in-memory backend"))
19
-
20
- def invalidate_cache_key(key: str):
 
 
 
 
21
  """Invalidate a specific cache key"""
22
  try:
23
  backend = FastAPICache.get_backend()
24
  if hasattr(backend, 'delete'):
25
- backend.delete(key)
26
  logger.info(LogFormatter.success(f"Cache invalidated for key: {key}"))
27
  else:
28
  logger.warning(LogFormatter.warning("Cache backend does not support deletion"))
29
  except Exception as e:
30
  logger.error(LogFormatter.error(f"Failed to invalidate cache key: {key}", e))
31
 
32
- def build_cache_key(namespace: str, *args) -> str:
33
- """Build a consistent cache key"""
34
- key = f"fastapi-cache:{namespace}:{':'.join(str(arg) for arg in args)}"
35
- logger.debug(LogFormatter.info(f"Built cache key: {key}"))
36
- return key
37
 
38
  def cached(expire: int = CACHE_TTL, key_builder=None):
39
  """Decorator for caching endpoint responses
 
5
  from app.config import CACHE_TTL
6
  import logging
7
  from app.core.formatting import LogFormatter
8
+ from typing import Optional, Any
9
 
10
  logger = logging.getLogger(__name__)
11
 
12
+ class CustomInMemoryBackend(InMemoryBackend):
13
+ def __init__(self):
14
+ """Initialize the cache backend"""
15
+ super().__init__()
16
+ self.cache = {}
17
+
18
+ async def delete(self, key: str) -> bool:
19
+ """Delete a key from the cache"""
20
+ try:
21
+ if key in self.cache:
22
+ del self.cache[key]
23
+ return True
24
+ return False
25
+ except Exception as e:
26
+ logger.error(LogFormatter.error(f"Failed to delete key {key} from cache", e))
27
+ return False
28
+
29
+ async def get(self, key: str) -> Any:
30
+ """Get a value from the cache"""
31
+ return self.cache.get(key)
32
+
33
+ async def set(self, key: str, value: Any, expire: Optional[int] = None) -> None:
34
+ """Set a value in the cache"""
35
+ self.cache[key] = value
36
+
37
  def setup_cache():
38
  """Initialize FastAPI Cache with in-memory backend"""
39
+ try:
40
+ logger.info(LogFormatter.section("CACHE INITIALIZATION"))
41
+ FastAPICache.init(
42
+ backend=CustomInMemoryBackend(),
43
+ prefix="fastapi-cache"
44
+ )
45
+ logger.info(LogFormatter.success("Cache initialized successfully"))
46
+ except Exception as e:
47
+ logger.error(LogFormatter.error("Failed to initialize cache", e))
48
+ raise
49
+
50
+ async def invalidate_cache_key(key: str):
51
  """Invalidate a specific cache key"""
52
  try:
53
  backend = FastAPICache.get_backend()
54
  if hasattr(backend, 'delete'):
55
+ await backend.delete(key)
56
  logger.info(LogFormatter.success(f"Cache invalidated for key: {key}"))
57
  else:
58
  logger.warning(LogFormatter.warning("Cache backend does not support deletion"))
59
  except Exception as e:
60
  logger.error(LogFormatter.error(f"Failed to invalidate cache key: {key}", e))
61
 
62
+ def build_cache_key(*args) -> str:
63
+ """Build a cache key from multiple arguments"""
64
+ return ":".join(str(arg) for arg in args if arg is not None)
 
 
65
 
66
  def cached(expire: int = CACHE_TTL, key_builder=None):
67
  """Decorator for caching endpoint responses
backend/app/services/models.py CHANGED
@@ -217,24 +217,25 @@ class ModelService(HuggingFaceService):
217
 
218
  try:
219
  logger.info(LogFormatter.subsection("DATASET LOADING"))
220
- logger.info(LogFormatter.info("Loading dataset files..."))
221
 
222
- # List files in repository
223
  with suppress_output():
224
- files = self.hf_api.list_repo_files(
225
  repo_id=QUEUE_REPO,
226
  repo_type="dataset",
227
  token=self.token
228
  )
229
 
230
- # Filter JSON files
231
- json_files = [f for f in files if f.endswith('.json')]
 
232
  total_files = len(json_files)
233
 
234
  # Log repository stats
235
  stats = {
236
- "Total_Files": len(files),
237
- "JSON_Files": total_files,
238
  }
239
  for line in LogFormatter.stats(stats, "Repository Statistics"):
240
  logger.info(line)
@@ -245,28 +246,67 @@ class ModelService(HuggingFaceService):
245
  # Initialize progress tracker
246
  progress = ProgressTracker(total_files, "PROCESSING FILES")
247
 
248
- try:
249
- # Create aiohttp session to reuse connections
250
- async with aiohttp.ClientSession() as session:
251
- # Process files in chunks
252
- chunk_size = 50
 
 
 
 
 
 
 
 
 
 
253
 
254
- for i in range(0, len(json_files), chunk_size):
255
- chunk = json_files[i:i + chunk_size]
256
- chunk_tasks = [
257
- self._download_and_process_file(file, session, progress)
258
- for file in chunk
259
- ]
260
- results = await asyncio.gather(*chunk_tasks)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
261
 
262
- # Process results
263
- for result in results:
264
- if result:
265
- status = result.pop("status")
266
- models[status.lower()].append(result)
267
 
268
- finally:
269
- progress.close()
 
 
 
270
 
271
  # Final summary with fancy formatting
272
  logger.info(LogFormatter.section("CACHE SUMMARY"))
@@ -516,7 +556,7 @@ class ModelService(HuggingFaceService):
516
  # Construct the path in the dataset
517
  org_or_user = model_data["model_id"].split("/")[0] if "/" in model_data["model_id"] else ""
518
  model_path = model_data["model_id"].split("/")[-1]
519
- relative_path = f"{org_or_user}/{model_path}_eval_request_False_{model_data['precision']}_{model_data['weight_type']}.json"
520
 
521
  # Create a temporary file with the request
522
  with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as temp_file:
@@ -550,7 +590,11 @@ class ModelService(HuggingFaceService):
550
  await self.vote_service.add_vote(
551
  model_data["model_id"],
552
  user_id,
553
- "up"
 
 
 
 
554
  )
555
  logger.info(LogFormatter.success("Vote recorded successfully"))
556
  except Exception as e:
@@ -587,26 +631,26 @@ class ModelService(HuggingFaceService):
587
  return {"status": "not_found"}
588
 
589
  async def get_organization_submissions(self, organization: str, days: int = 7) -> List[Dict[str, Any]]:
590
- """Get all submissions from an organization in the last n days"""
591
  try:
592
  # Get all models
593
  all_models = await self.get_models()
594
  current_time = datetime.now(timezone.utc)
595
  cutoff_time = current_time - timedelta(days=days)
596
 
597
- # Filter models by organization and submission time
598
- org_submissions = []
599
  for status, models in all_models.items():
600
  for model in models:
601
- # Check if model belongs to organization
602
- if model["name"].startswith(f"{organization}/"):
603
  # Parse submission time
604
  submit_time = datetime.fromisoformat(
605
  model["submission_time"].replace("Z", "+00:00")
606
  )
607
  # Check if within time window
608
  if submit_time > cutoff_time:
609
- org_submissions.append({
610
  "name": model["name"],
611
  "status": status,
612
  "submission_time": model["submission_time"],
@@ -614,7 +658,7 @@ class ModelService(HuggingFaceService):
614
  })
615
 
616
  return sorted(
617
- org_submissions,
618
  key=lambda x: x["submission_time"],
619
  reverse=True
620
  )
 
217
 
218
  try:
219
  logger.info(LogFormatter.subsection("DATASET LOADING"))
220
+ logger.info(LogFormatter.info("Loading dataset..."))
221
 
222
+ # Download entire dataset snapshot
223
  with suppress_output():
224
+ local_dir = self.hf_api.snapshot_download(
225
  repo_id=QUEUE_REPO,
226
  repo_type="dataset",
227
  token=self.token
228
  )
229
 
230
+ # List JSON files in local directory
231
+ local_path = Path(local_dir)
232
+ json_files = list(local_path.glob("**/*.json"))
233
  total_files = len(json_files)
234
 
235
  # Log repository stats
236
  stats = {
237
+ "Total_Files": total_files,
238
+ "Local_Dir": str(local_path),
239
  }
240
  for line in LogFormatter.stats(stats, "Repository Statistics"):
241
  logger.info(line)
 
246
  # Initialize progress tracker
247
  progress = ProgressTracker(total_files, "PROCESSING FILES")
248
 
249
+ # Process local files
250
+ model_submissions = {} # Dict to track latest submission for each (model_id, revision, precision)
251
+ for file_path in json_files:
252
+ try:
253
+ with open(file_path, 'r') as f:
254
+ content = json.load(f)
255
+
256
+ # Get status and determine target status
257
+ status = content.get("status", "PENDING").upper()
258
+ target_status = None
259
+ status_map = {
260
+ "PENDING": ["PENDING"],
261
+ "EVALUATING": ["RUNNING"],
262
+ "FINISHED": ["FINISHED"]
263
+ }
264
 
265
+ for target, source_statuses in status_map.items():
266
+ if status in source_statuses:
267
+ target_status = target
268
+ break
269
+
270
+ if not target_status:
271
+ progress.update()
272
+ continue
273
+
274
+ # Calculate wait time
275
+ try:
276
+ submit_time = datetime.fromisoformat(content["submitted_time"].replace("Z", "+00:00"))
277
+ if submit_time.tzinfo is None:
278
+ submit_time = submit_time.replace(tzinfo=timezone.utc)
279
+ current_time = datetime.now(timezone.utc)
280
+ wait_time = current_time - submit_time
281
+
282
+ model_info = {
283
+ "name": content["model"],
284
+ "submitter": content.get("sender", "Unknown"),
285
+ "revision": content["revision"],
286
+ "wait_time": f"{wait_time.total_seconds():.1f}s",
287
+ "submission_time": content["submitted_time"],
288
+ "status": target_status,
289
+ "precision": content.get("precision", "Unknown")
290
+ }
291
+
292
+ # Use (model_id, revision, precision) as key to track latest submission
293
+ key = (content["model"], content["revision"], content.get("precision", "Unknown"))
294
+ if key not in model_submissions or submit_time > datetime.fromisoformat(model_submissions[key]["submission_time"].replace("Z", "+00:00")):
295
+ model_submissions[key] = model_info
296
+
297
+ except (ValueError, TypeError) as e:
298
+ logger.error(LogFormatter.error(f"Failed to process {file_path.name}", e))
299
 
300
+ except Exception as e:
301
+ logger.error(LogFormatter.error(f"Failed to load {file_path.name}", e))
302
+ finally:
303
+ progress.update()
 
304
 
305
+ # Populate models dict with deduplicated submissions
306
+ for model_info in model_submissions.values():
307
+ models[model_info["status"].lower()].append(model_info)
308
+
309
+ progress.close()
310
 
311
  # Final summary with fancy formatting
312
  logger.info(LogFormatter.section("CACHE SUMMARY"))
 
556
  # Construct the path in the dataset
557
  org_or_user = model_data["model_id"].split("/")[0] if "/" in model_data["model_id"] else ""
558
  model_path = model_data["model_id"].split("/")[-1]
559
+ relative_path = f"{org_or_user}/{model_path}_eval_request_False_{model_data['precision']}.json"
560
 
561
  # Create a temporary file with the request
562
  with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as temp_file:
 
590
  await self.vote_service.add_vote(
591
  model_data["model_id"],
592
  user_id,
593
+ "up",
594
+ {
595
+ "precision": model_data["precision"],
596
+ "revision": model_data["revision"]
597
+ }
598
  )
599
  logger.info(LogFormatter.success("Vote recorded successfully"))
600
  except Exception as e:
 
631
  return {"status": "not_found"}
632
 
633
  async def get_organization_submissions(self, organization: str, days: int = 7) -> List[Dict[str, Any]]:
634
+ """Get all submissions from a user in the last n days"""
635
  try:
636
  # Get all models
637
  all_models = await self.get_models()
638
  current_time = datetime.now(timezone.utc)
639
  cutoff_time = current_time - timedelta(days=days)
640
 
641
+ # Filter models by submitter and submission time
642
+ user_submissions = []
643
  for status, models in all_models.items():
644
  for model in models:
645
+ # Check if model was submitted by the user
646
+ if model["submitter"] == organization:
647
  # Parse submission time
648
  submit_time = datetime.fromisoformat(
649
  model["submission_time"].replace("Z", "+00:00")
650
  )
651
  # Check if within time window
652
  if submit_time > cutoff_time:
653
+ user_submissions.append({
654
  "name": model["name"],
655
  "status": status,
656
  "submission_time": model["submission_time"],
 
658
  })
659
 
660
  return sorted(
661
+ user_submissions,
662
  key=lambda x: x["submission_time"],
663
  reverse=True
664
  )
backend/app/services/votes.py CHANGED
@@ -6,7 +6,8 @@ import asyncio
6
  from pathlib import Path
7
  import aiohttp
8
  from huggingface_hub import HfApi
9
- import datasets
 
10
 
11
  from app.services.hf_service import HuggingFaceService
12
  from app.config import HF_TOKEN
@@ -30,17 +31,15 @@ class VoteService(HuggingFaceService):
30
  super().__init__()
31
  self.votes_file = cache_config.votes_file
32
  self.votes_to_upload: List[Dict[str, Any]] = []
33
- self.vote_check_set: Set[Tuple[str, str, str]] = set()
34
  self._votes_by_model: Dict[str, List[Dict[str, Any]]] = {}
35
  self._votes_by_user: Dict[str, List[Dict[str, Any]]] = {}
36
- self._upload_lock = asyncio.Lock()
37
  self._last_sync = None
38
  self._sync_interval = 300 # 5 minutes
39
  self._total_votes = 0
40
  self._last_vote_timestamp = None
41
  self._max_retries = 3
42
  self._retry_delay = 1 # seconds
43
- self._upload_batch_size = 10
44
  self.hf_api = HfApi(token=HF_TOKEN)
45
  self._init_done = True
46
 
@@ -56,29 +55,21 @@ class VoteService(HuggingFaceService):
56
  # Ensure votes directory exists
57
  self.votes_file.parent.mkdir(parents=True, exist_ok=True)
58
 
59
- # Load existing votes if file exists
60
- local_vote_count = 0
61
- if self.votes_file.exists():
62
- logger.info(LogFormatter.info(f"Loading votes from {self.votes_file}"))
63
- local_vote_count = await self._count_local_votes()
64
- logger.info(LogFormatter.info(f"Found {local_vote_count:,} local votes"))
65
-
66
- # Check remote votes count
67
- remote_vote_count = await self._count_remote_votes()
68
- logger.info(LogFormatter.info(f"Found {remote_vote_count:,} remote votes"))
69
-
70
- if remote_vote_count > local_vote_count:
71
- logger.info(LogFormatter.info(f"Fetching {remote_vote_count - local_vote_count:,} new votes"))
72
- await self._sync_with_hub()
73
- elif remote_vote_count < local_vote_count:
74
- logger.warning(LogFormatter.warning(f"Local votes ({local_vote_count:,}) > Remote votes ({remote_vote_count:,})"))
75
  await self._load_existing_votes()
76
  else:
77
- logger.info(LogFormatter.success("Local and remote votes are in sync"))
78
- if local_vote_count > 0:
79
- await self._load_existing_votes()
80
- else:
81
- logger.info(LogFormatter.info("No votes found"))
82
 
83
  self._initialized = True
84
  self._last_sync = datetime.now(timezone.utc)
@@ -96,80 +87,105 @@ class VoteService(HuggingFaceService):
96
  logger.error(LogFormatter.error("Initialization failed", e))
97
  raise
98
 
99
- async def _count_local_votes(self) -> int:
100
- """Count votes in local file"""
101
- if not self.votes_file.exists():
102
- return 0
103
-
104
- count = 0
105
- try:
106
- with open(self.votes_file, 'r') as f:
107
- for _ in f:
108
- count += 1
109
- return count
110
- except Exception as e:
111
- logger.error(f"Error counting local votes: {str(e)}")
112
- return 0
113
-
114
- async def _count_remote_votes(self) -> int:
115
- """Count votes in remote file"""
116
  url = f"https://huggingface.co/datasets/{HF_ORGANIZATION}/votes/raw/main/votes_data.jsonl"
117
- headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}
118
 
119
  try:
120
  async with aiohttp.ClientSession() as session:
121
  async with session.get(url, headers=headers) as response:
122
  if response.status == 200:
123
- count = 0
124
  async for line in response.content:
125
- if line.strip(): # Skip empty lines
126
- count += 1
127
- return count
 
 
 
 
128
  else:
129
  logger.error(f"Failed to get remote votes: HTTP {response.status}")
130
- return 0
131
  except Exception as e:
132
- logger.error(f"Error counting remote votes: {str(e)}")
133
- return 0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
134
 
135
  async def _sync_with_hub(self):
136
- """Sync votes with HuggingFace hub using datasets"""
137
  try:
138
  logger.info(LogFormatter.section("VOTE SYNC"))
139
- self._log_repo_operation("sync", f"{HF_ORGANIZATION}/votes", "Syncing local votes with HF hub")
140
- logger.info(LogFormatter.info("Syncing with HuggingFace hub..."))
141
-
142
- # Load votes from HF dataset
143
- dataset = datasets.load_dataset(
144
- f"{HF_ORGANIZATION}/votes",
145
- split="train",
146
- cache_dir=cache_config.get_cache_path("datasets")
147
- )
148
-
149
- remote_votes = len(dataset)
150
- logger.info(LogFormatter.info(f"Dataset loaded with {remote_votes:,} votes"))
151
 
152
- # Convert to list of dictionaries
153
- df = dataset.to_pandas()
154
- if 'timestamp' in df.columns:
155
- df['timestamp'] = df['timestamp'].dt.strftime('%Y-%m-%dT%H:%M:%SZ')
156
- remote_votes = df.to_dict('records')
157
 
158
- # If we have more remote votes than local
159
- if len(remote_votes) > self._total_votes:
160
- new_votes = len(remote_votes) - self._total_votes
161
- logger.info(LogFormatter.info(f"Processing {new_votes:,} new votes..."))
162
 
163
- # Save votes to local file
164
- with open(self.votes_file, 'w') as f:
 
 
 
165
  for vote in remote_votes:
166
- f.write(json.dumps(vote) + '\n')
 
 
167
 
168
- # Reload votes in memory
169
- await self._load_existing_votes()
170
- logger.info(LogFormatter.success("Sync completed successfully"))
171
- else:
172
- logger.info(LogFormatter.success("Local votes are up to date"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
173
 
174
  self._last_sync = datetime.now(timezone.utc)
175
 
@@ -177,23 +193,6 @@ class VoteService(HuggingFaceService):
177
  logger.error(LogFormatter.error("Sync failed", e))
178
  raise
179
 
180
- async def _check_for_new_votes(self):
181
- """Check for new votes on the hub"""
182
- try:
183
- self._log_repo_operation("check", f"{HF_ORGANIZATION}/votes", "Checking for new votes")
184
- # Load only dataset metadata
185
- dataset_info = datasets.load_dataset(f"{HF_ORGANIZATION}/votes", split="train")
186
- remote_vote_count = len(dataset_info)
187
-
188
- if remote_vote_count > self._total_votes:
189
- logger.info(f"Found {remote_vote_count - self._total_votes} new votes on hub")
190
- await self._sync_with_hub()
191
- else:
192
- logger.info("No new votes found on hub")
193
-
194
- except Exception as e:
195
- logger.error(f"Error checking for new votes: {str(e)}")
196
-
197
  async def _load_existing_votes(self):
198
  """Load existing votes from file"""
199
  if not self.votes_file.exists():
@@ -261,7 +260,13 @@ class VoteService(HuggingFaceService):
261
  def _add_vote_to_memory(self, vote: Dict[str, Any]):
262
  """Add vote to memory structures"""
263
  try:
264
- check_tuple = (vote["model"], vote["revision"], vote["username"])
 
 
 
 
 
 
265
 
266
  # Skip if we already have this vote
267
  if check_tuple in self.vote_check_set:
@@ -280,13 +285,19 @@ class VoteService(HuggingFaceService):
280
  self._votes_by_user[vote["username"]].append(vote)
281
 
282
  except KeyError as e:
283
- logger.error(f"Malformed vote data, missing key: {str(e)}")
284
  except Exception as e:
285
- logger.error(f"Error adding vote to memory: {str(e)}")
286
 
287
  async def get_user_votes(self, user_id: str) -> List[Dict[str, Any]]:
288
  """Get all votes from a specific user"""
289
  logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
 
 
 
 
 
 
290
  votes = self._votes_by_user.get(user_id, [])
291
  logger.info(LogFormatter.success(f"Found {len(votes):,} votes"))
292
  return votes
@@ -294,19 +305,31 @@ class VoteService(HuggingFaceService):
294
  async def get_model_votes(self, model_id: str) -> Dict[str, Any]:
295
  """Get all votes for a specific model"""
296
  logger.info(LogFormatter.info(f"Fetching votes for model: {model_id}"))
 
 
 
 
 
 
297
  votes = self._votes_by_model.get(model_id, [])
298
 
299
- # Group votes by revision
300
- votes_by_revision = {}
301
  for vote in votes:
302
- revision = vote["revision"]
303
- if revision not in votes_by_revision:
304
- votes_by_revision[revision] = 0
305
- votes_by_revision[revision] += 1
 
 
 
 
 
 
306
 
307
  stats = {
308
  "Total_Votes": len(votes),
309
- **{f"Revision_{k}": v for k, v in votes_by_revision.items()}
310
  }
311
 
312
  logger.info(LogFormatter.section("VOTE STATISTICS"))
@@ -315,7 +338,7 @@ class VoteService(HuggingFaceService):
315
 
316
  return {
317
  "total_votes": len(votes),
318
- "votes_by_revision": votes_by_revision,
319
  "votes": votes
320
  }
321
 
@@ -337,7 +360,7 @@ class VoteService(HuggingFaceService):
337
  logger.warning(f"Using 'main' as fallback revision for {model_id} after {self._max_retries} failed attempts")
338
  return "main"
339
 
340
- async def add_vote(self, model_id: str, user_id: str, vote_type: str) -> Dict[str, Any]:
341
  """Add a vote for a model"""
342
  try:
343
  self._log_repo_operation("add", f"{HF_ORGANIZATION}/votes", f"Adding {vote_type} vote for {model_id} by {user_id}")
@@ -345,23 +368,48 @@ class VoteService(HuggingFaceService):
345
  stats = {
346
  "Model": model_id,
347
  "User": user_id,
348
- "Type": vote_type
 
349
  }
350
  for line in LogFormatter.tree(stats, "Vote Details"):
351
  logger.info(line)
352
 
353
- revision = await self._get_model_revision(model_id)
354
- check_tuple = (model_id, revision, user_id)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
355
 
356
  if check_tuple in self.vote_check_set:
357
- raise ValueError("Vote already recorded for this model")
358
 
359
  vote = {
360
  "model": model_id,
361
  "revision": revision,
362
  "username": user_id,
363
  "timestamp": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
364
- "vote_type": vote_type
 
365
  }
366
 
367
  # Update local storage
@@ -373,15 +421,18 @@ class VoteService(HuggingFaceService):
373
 
374
  stats = {
375
  "Status": "Success",
376
- "Queue_Size": len(self.votes_to_upload)
 
 
 
 
377
  }
378
  for line in LogFormatter.stats(stats):
379
  logger.info(line)
380
 
381
- # Try to upload if batch size reached
382
- if len(self.votes_to_upload) >= self._upload_batch_size:
383
- logger.info(LogFormatter.info(f"Upload batch size reached ({self._upload_batch_size}), triggering sync"))
384
- await self._sync_with_hub()
385
 
386
  return {"status": "success", "message": "Vote added successfully"}
387
 
 
6
  from pathlib import Path
7
  import aiohttp
8
  from huggingface_hub import HfApi
9
+ import tempfile
10
+ import os
11
 
12
  from app.services.hf_service import HuggingFaceService
13
  from app.config import HF_TOKEN
 
31
  super().__init__()
32
  self.votes_file = cache_config.votes_file
33
  self.votes_to_upload: List[Dict[str, Any]] = []
34
+ self.vote_check_set: Set[Tuple[str, str, str, str]] = set()
35
  self._votes_by_model: Dict[str, List[Dict[str, Any]]] = {}
36
  self._votes_by_user: Dict[str, List[Dict[str, Any]]] = {}
 
37
  self._last_sync = None
38
  self._sync_interval = 300 # 5 minutes
39
  self._total_votes = 0
40
  self._last_vote_timestamp = None
41
  self._max_retries = 3
42
  self._retry_delay = 1 # seconds
 
43
  self.hf_api = HfApi(token=HF_TOKEN)
44
  self._init_done = True
45
 
 
55
  # Ensure votes directory exists
56
  self.votes_file.parent.mkdir(parents=True, exist_ok=True)
57
 
58
+ # Load remote votes
59
+ remote_votes = await self._fetch_remote_votes()
60
+ if remote_votes:
61
+ logger.info(LogFormatter.info(f"Loaded {len(remote_votes)} votes from hub"))
62
+
63
+ # Save to local file
64
+ with open(self.votes_file, 'w') as f:
65
+ for vote in remote_votes:
66
+ json.dump(vote, f)
67
+ f.write('\n')
68
+
69
+ # Load into memory
 
 
 
 
70
  await self._load_existing_votes()
71
  else:
72
+ logger.warning(LogFormatter.warning("No votes found on hub"))
 
 
 
 
73
 
74
  self._initialized = True
75
  self._last_sync = datetime.now(timezone.utc)
 
87
  logger.error(LogFormatter.error("Initialization failed", e))
88
  raise
89
 
90
+ async def _fetch_remote_votes(self) -> List[Dict[str, Any]]:
91
+ """Fetch votes from HF hub"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
  url = f"https://huggingface.co/datasets/{HF_ORGANIZATION}/votes/raw/main/votes_data.jsonl"
93
+ headers = {"Authorization": f"Bearer {self.token}"} if self.token else {}
94
 
95
  try:
96
  async with aiohttp.ClientSession() as session:
97
  async with session.get(url, headers=headers) as response:
98
  if response.status == 200:
99
+ votes = []
100
  async for line in response.content:
101
+ if line.strip():
102
+ try:
103
+ vote = json.loads(line.decode())
104
+ votes.append(vote)
105
+ except json.JSONDecodeError:
106
+ continue
107
+ return votes
108
  else:
109
  logger.error(f"Failed to get remote votes: HTTP {response.status}")
110
+ return []
111
  except Exception as e:
112
+ logger.error(f"Error fetching remote votes: {str(e)}")
113
+ return []
114
+
115
+ async def _check_for_new_votes(self):
116
+ """Check for new votes on the hub and sync if needed"""
117
+ try:
118
+ remote_votes = await self._fetch_remote_votes()
119
+ if len(remote_votes) != self._total_votes:
120
+ logger.info(f"Vote count changed: Local ({self._total_votes}) ≠ Remote ({len(remote_votes)})")
121
+ # Save to local file
122
+ with open(self.votes_file, 'w') as f:
123
+ for vote in remote_votes:
124
+ json.dump(vote, f)
125
+ f.write('\n')
126
+
127
+ # Reload into memory
128
+ await self._load_existing_votes()
129
+ else:
130
+ logger.info("Votes are in sync")
131
+
132
+ except Exception as e:
133
+ logger.error(f"Error checking for new votes: {str(e)}")
134
 
135
  async def _sync_with_hub(self):
136
+ """Sync votes with HuggingFace hub"""
137
  try:
138
  logger.info(LogFormatter.section("VOTE SYNC"))
 
 
 
 
 
 
 
 
 
 
 
 
139
 
140
+ # Get current remote votes
141
+ remote_votes = await self._fetch_remote_votes()
142
+ logger.info(LogFormatter.info(f"Loaded {len(remote_votes)} votes from hub"))
 
 
143
 
144
+ # If we have pending votes to upload
145
+ if self.votes_to_upload:
146
+ logger.info(LogFormatter.info(f"Adding {len(self.votes_to_upload)} pending votes..."))
 
147
 
148
+ # Add new votes to remote votes
149
+ remote_votes.extend(self.votes_to_upload)
150
+
151
+ # Create temporary file with all votes
152
+ with tempfile.NamedTemporaryFile(mode='w', suffix='.jsonl', delete=False) as temp_file:
153
  for vote in remote_votes:
154
+ json.dump(vote, temp_file)
155
+ temp_file.write('\n')
156
+ temp_path = temp_file.name
157
 
158
+ try:
159
+ # Upload JSONL file directly
160
+ self.hf_api.upload_file(
161
+ path_or_fileobj=temp_path,
162
+ path_in_repo="votes_data.jsonl",
163
+ repo_id=f"{HF_ORGANIZATION}/votes",
164
+ repo_type="dataset",
165
+ commit_message=f"Update votes: +{len(self.votes_to_upload)} new votes",
166
+ token=self.token
167
+ )
168
+
169
+ # Clear pending votes only if upload succeeded
170
+ self.votes_to_upload.clear()
171
+ logger.info(LogFormatter.success("Pending votes uploaded successfully"))
172
+
173
+ except Exception as e:
174
+ logger.error(LogFormatter.error("Failed to upload votes to hub", e))
175
+ raise
176
+ finally:
177
+ # Clean up temp file
178
+ os.unlink(temp_path)
179
+
180
+ # Update local state
181
+ with open(self.votes_file, 'w') as f:
182
+ for vote in remote_votes:
183
+ json.dump(vote, f)
184
+ f.write('\n')
185
+
186
+ # Reload votes in memory
187
+ await self._load_existing_votes()
188
+ logger.info(LogFormatter.success("Sync completed successfully"))
189
 
190
  self._last_sync = datetime.now(timezone.utc)
191
 
 
193
  logger.error(LogFormatter.error("Sync failed", e))
194
  raise
195
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
196
  async def _load_existing_votes(self):
197
  """Load existing votes from file"""
198
  if not self.votes_file.exists():
 
260
  def _add_vote_to_memory(self, vote: Dict[str, Any]):
261
  """Add vote to memory structures"""
262
  try:
263
+ # Create a unique identifier tuple that includes precision
264
+ check_tuple = (
265
+ vote["model"],
266
+ vote.get("revision", "main"),
267
+ vote["username"],
268
+ vote.get("precision", "unknown")
269
+ )
270
 
271
  # Skip if we already have this vote
272
  if check_tuple in self.vote_check_set:
 
285
  self._votes_by_user[vote["username"]].append(vote)
286
 
287
  except KeyError as e:
288
+ logger.error(LogFormatter.error("Malformed vote data, missing key", str(e)))
289
  except Exception as e:
290
+ logger.error(LogFormatter.error("Error adding vote to memory", str(e)))
291
 
292
  async def get_user_votes(self, user_id: str) -> List[Dict[str, Any]]:
293
  """Get all votes from a specific user"""
294
  logger.info(LogFormatter.info(f"Fetching votes for user: {user_id}"))
295
+
296
+ # Check if we need to refresh votes
297
+ if (datetime.now(timezone.utc) - self._last_sync).total_seconds() > self._sync_interval:
298
+ logger.info(LogFormatter.info("Cache expired, refreshing votes..."))
299
+ await self._check_for_new_votes()
300
+
301
  votes = self._votes_by_user.get(user_id, [])
302
  logger.info(LogFormatter.success(f"Found {len(votes):,} votes"))
303
  return votes
 
305
  async def get_model_votes(self, model_id: str) -> Dict[str, Any]:
306
  """Get all votes for a specific model"""
307
  logger.info(LogFormatter.info(f"Fetching votes for model: {model_id}"))
308
+
309
+ # Check if we need to refresh votes
310
+ if (datetime.now(timezone.utc) - self._last_sync).total_seconds() > self._sync_interval:
311
+ logger.info(LogFormatter.info("Cache expired, refreshing votes..."))
312
+ await self._check_for_new_votes()
313
+
314
  votes = self._votes_by_model.get(model_id, [])
315
 
316
+ # Group votes by revision and precision
317
+ votes_by_config = {}
318
  for vote in votes:
319
+ revision = vote.get("revision", "main")
320
+ precision = vote.get("precision", "unknown")
321
+ config_key = f"{revision}_{precision}"
322
+ if config_key not in votes_by_config:
323
+ votes_by_config[config_key] = {
324
+ "revision": revision,
325
+ "precision": precision,
326
+ "count": 0
327
+ }
328
+ votes_by_config[config_key]["count"] += 1
329
 
330
  stats = {
331
  "Total_Votes": len(votes),
332
+ **{f"Config_{k}": v["count"] for k, v in votes_by_config.items()}
333
  }
334
 
335
  logger.info(LogFormatter.section("VOTE STATISTICS"))
 
338
 
339
  return {
340
  "total_votes": len(votes),
341
+ "votes_by_config": votes_by_config,
342
  "votes": votes
343
  }
344
 
 
360
  logger.warning(f"Using 'main' as fallback revision for {model_id} after {self._max_retries} failed attempts")
361
  return "main"
362
 
363
+ async def add_vote(self, model_id: str, user_id: str, vote_type: str, vote_data: Dict[str, Any] = None) -> Dict[str, Any]:
364
  """Add a vote for a model"""
365
  try:
366
  self._log_repo_operation("add", f"{HF_ORGANIZATION}/votes", f"Adding {vote_type} vote for {model_id} by {user_id}")
 
368
  stats = {
369
  "Model": model_id,
370
  "User": user_id,
371
+ "Type": vote_type,
372
+ "Config": vote_data or {}
373
  }
374
  for line in LogFormatter.tree(stats, "Vote Details"):
375
  logger.info(line)
376
 
377
+ # Use provided configuration or fallback to model info
378
+ precision = None
379
+ revision = None
380
+
381
+ if vote_data:
382
+ precision = vote_data.get("precision")
383
+ revision = vote_data.get("revision")
384
+
385
+ # If any info is missing, try to get it from model info
386
+ if not all([precision, revision]):
387
+ try:
388
+ model_info = await asyncio.to_thread(self.hf_api.model_info, model_id)
389
+ model_card_data = model_info.cardData if hasattr(model_info, 'cardData') else {}
390
+
391
+ if not precision:
392
+ precision = model_card_data.get("precision", "unknown")
393
+ if not revision:
394
+ revision = model_info.sha
395
+ except Exception as e:
396
+ logger.warning(LogFormatter.warning(f"Failed to get model info: {str(e)}. Using default values."))
397
+ precision = precision or "unknown"
398
+ revision = revision or "main"
399
+
400
+ # Check if vote already exists with this configuration
401
+ check_tuple = (model_id, revision, user_id, precision)
402
 
403
  if check_tuple in self.vote_check_set:
404
+ raise ValueError(f"Vote already recorded for this model configuration (precision: {precision}, revision: {revision[:7] if revision else 'unknown'})")
405
 
406
  vote = {
407
  "model": model_id,
408
  "revision": revision,
409
  "username": user_id,
410
  "timestamp": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
411
+ "vote_type": vote_type,
412
+ "precision": precision
413
  }
414
 
415
  # Update local storage
 
421
 
422
  stats = {
423
  "Status": "Success",
424
+ "Queue_Size": len(self.votes_to_upload),
425
+ "Model_Config": {
426
+ "Precision": precision,
427
+ "Revision": revision[:7] if revision else "unknown"
428
+ }
429
  }
430
  for line in LogFormatter.stats(stats):
431
  logger.info(line)
432
 
433
+ # Force immediate sync
434
+ logger.info(LogFormatter.info("Forcing immediate sync with hub"))
435
+ await self._sync_with_hub()
 
436
 
437
  return {"status": "success", "message": "Vote added successfully"}
438
 
frontend/src/pages/AddModelPage/AddModelPage.js CHANGED
@@ -26,7 +26,7 @@ function AddModelPage() {
26
  }
27
 
28
  return (
29
- <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
30
  <PageHeader
31
  title="Submit a Model for Evaluation"
32
  subtitle={
 
26
  }
27
 
28
  return (
29
+ <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", py: 4, px: 0 }}>
30
  <PageHeader
31
  title="Submit a Model for Evaluation"
32
  subtitle={
frontend/src/pages/AddModelPage/components/EvaluationQueues/EvaluationQueues.js CHANGED
@@ -192,16 +192,13 @@ const ModelTable = ({ models, emptyMessage, status }) => {
192
  style={{
193
  height: `${rowVirtualizer.getTotalSize()}px`,
194
  padding: 0,
 
 
 
195
  }}
196
  colSpan={columns.length}
197
  >
198
- <div
199
- style={{
200
- position: "relative",
201
- width: "100%",
202
- height: `${rowVirtualizer.getTotalSize()}px`,
203
- }}
204
- >
205
  {rowVirtualizer.getVirtualItems().map((virtualRow) => {
206
  const model = models[virtualRow.index];
207
  const waitTime = formatWaitTime(model.wait_time);
@@ -222,7 +219,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
222
  hover
223
  >
224
  <TableCell
225
- component="div"
226
  sx={{
227
  flex: `0 0 ${columns[0].width}`,
228
  padding: "12px 16px",
@@ -258,7 +254,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
258
  </Link>
259
  </TableCell>
260
  <TableCell
261
- component="div"
262
  sx={{
263
  flex: `0 0 ${columns[1].width}`,
264
  padding: "12px 16px",
@@ -274,7 +269,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
274
  {model.submitter}
275
  </TableCell>
276
  <TableCell
277
- component="div"
278
  align={columns[2].align}
279
  sx={{
280
  flex: `0 0 ${columns[2].width}`,
@@ -303,7 +297,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
303
  </Tooltip>
304
  </TableCell>
305
  <TableCell
306
- component="div"
307
  align={columns[3].align}
308
  sx={{
309
  flex: `0 0 ${columns[3].width}`,
@@ -320,7 +313,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
320
  </Typography>
321
  </TableCell>
322
  <TableCell
323
- component="div"
324
  align={columns[4].align}
325
  sx={{
326
  flex: `0 0 ${columns[4].width}`,
@@ -336,7 +328,6 @@ const ModelTable = ({ models, emptyMessage, status }) => {
336
  {model.revision.substring(0, 7)}
337
  </TableCell>
338
  <TableCell
339
- component="div"
340
  align={columns[5].align}
341
  sx={{
342
  flex: `0 0 ${columns[5].width}`,
@@ -351,7 +342,7 @@ const ModelTable = ({ models, emptyMessage, status }) => {
351
  </TableRow>
352
  );
353
  })}
354
- </div>
355
  </TableCell>
356
  </TableRow>
357
  </TableBody>
 
192
  style={{
193
  height: `${rowVirtualizer.getTotalSize()}px`,
194
  padding: 0,
195
+ position: "relative",
196
+ width: "100%",
197
+ height: `${rowVirtualizer.getTotalSize()}px`,
198
  }}
199
  colSpan={columns.length}
200
  >
201
+ <>
 
 
 
 
 
 
202
  {rowVirtualizer.getVirtualItems().map((virtualRow) => {
203
  const model = models[virtualRow.index];
204
  const waitTime = formatWaitTime(model.wait_time);
 
219
  hover
220
  >
221
  <TableCell
 
222
  sx={{
223
  flex: `0 0 ${columns[0].width}`,
224
  padding: "12px 16px",
 
254
  </Link>
255
  </TableCell>
256
  <TableCell
 
257
  sx={{
258
  flex: `0 0 ${columns[1].width}`,
259
  padding: "12px 16px",
 
269
  {model.submitter}
270
  </TableCell>
271
  <TableCell
 
272
  align={columns[2].align}
273
  sx={{
274
  flex: `0 0 ${columns[2].width}`,
 
297
  </Tooltip>
298
  </TableCell>
299
  <TableCell
 
300
  align={columns[3].align}
301
  sx={{
302
  flex: `0 0 ${columns[3].width}`,
 
313
  </Typography>
314
  </TableCell>
315
  <TableCell
 
316
  align={columns[4].align}
317
  sx={{
318
  flex: `0 0 ${columns[4].width}`,
 
328
  {model.revision.substring(0, 7)}
329
  </TableCell>
330
  <TableCell
 
331
  align={columns[5].align}
332
  sx={{
333
  flex: `0 0 ${columns[5].width}`,
 
342
  </TableRow>
343
  );
344
  })}
345
+ </>
346
  </TableCell>
347
  </TableRow>
348
  </TableBody>
frontend/src/pages/QuotePage/QuotePage.js CHANGED
@@ -226,7 +226,7 @@ const CitationBlock = ({ citation, title, authors, url, type }) => {
226
 
227
  function QuotePage() {
228
  return (
229
- <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
230
  <PageHeader
231
  title="Citation Information"
232
  subtitle="How to cite the Open LLM Leaderboard in your work"
 
226
 
227
  function QuotePage() {
228
  return (
229
+ <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", py: 4, px: 0 }}>
230
  <PageHeader
231
  title="Citation Information"
232
  subtitle="How to cite the Open LLM Leaderboard in your work"
frontend/src/pages/VoteModelPage/VoteModelPage.js CHANGED
@@ -74,6 +74,12 @@ function VoteModelPage() {
74
  const [loadingModels, setLoadingModels] = useState(true);
75
  const [error, setError] = useState(null);
76
  const [userVotes, setUserVotes] = useState(new Set());
 
 
 
 
 
 
77
 
78
  const formatWaitTime = (submissionTime) => {
79
  if (!submissionTime) return "N/A";
@@ -98,173 +104,209 @@ function VoteModelPage() {
98
  return `${diffInWeeks}w`;
99
  };
100
 
101
- // Fetch user's votes
102
- useEffect(() => {
103
- const fetchUserVotes = async () => {
104
- if (!isAuthenticated || !user) return;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
105
 
106
- try {
107
- // Récupérer les votes du localStorage
108
- const localVotes = JSON.parse(
109
- localStorage.getItem(`votes_${user.username}`) || "[]"
110
- );
111
- const localVotesSet = new Set(localVotes);
 
112
 
113
- // Récupérer les votes du serveur
114
- const response = await fetch(`/api/votes/user/${user.username}`);
115
- if (!response.ok) {
116
- throw new Error("Failed to fetch user votes");
117
- }
118
- const data = await response.json();
119
 
120
- // Fusionner les votes du serveur avec les votes locaux
121
- const votedModels = new Set([
122
- ...data.map((vote) => vote.model),
123
- ...localVotesSet,
124
- ]);
125
- setUserVotes(votedModels);
126
- } catch (err) {
127
- console.error("Error fetching user votes:", err);
128
  }
129
- };
130
 
131
- fetchUserVotes();
132
- }, [isAuthenticated, user]);
 
 
133
 
 
134
  useEffect(() => {
135
- const fetchModels = async () => {
136
  try {
137
- const response = await fetch("/api/models/pending");
138
- if (!response.ok) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
139
  throw new Error("Failed to fetch pending models");
140
  }
141
- const data = await response.json();
142
 
143
  // Fetch votes for each model
144
  const modelsWithVotes = await Promise.all(
145
- data.map(async (model) => {
146
- const [provider, modelName] = model.name.split("/");
147
- const votesResponse = await fetch(
148
- `/api/votes/model/${provider}/${modelName}`
149
- );
150
- const votesData = await votesResponse.json();
151
-
152
- // Calculate total vote score from votes_by_revision
153
- const totalScore = Object.values(
154
- votesData.votes_by_revision || {}
155
- ).reduce((a, b) => a + b, 0);
156
-
157
- // Calculate wait time based on submission_time from model data
158
- const waitTimeDisplay = formatWaitTime(model.submission_time);
159
-
160
- return {
161
- ...model,
162
- votes: totalScore,
163
- votes_by_revision: votesData.votes_by_revision,
164
- wait_time: waitTimeDisplay,
165
- hasVoted: userVotes.has(model.name),
166
- };
 
 
 
 
 
 
 
 
 
 
 
 
 
167
  })
168
  );
169
 
170
- // Sort models by vote score in descending order
171
- const sortedModels = modelsWithVotes.sort((a, b) => b.votes - a.votes);
172
-
173
  setPendingModels(sortedModels);
174
  } catch (err) {
 
175
  setError(err.message);
176
  } finally {
177
  setLoadingModels(false);
178
  }
179
  };
180
 
181
- fetchModels();
182
- }, [userVotes]);
183
 
184
- const handleVote = async (modelName) => {
185
  if (!isAuthenticated) return;
186
 
187
  try {
188
- // Disable the button immediately by adding the model to userVotes
189
- setUserVotes((prev) => {
190
- const newSet = new Set([...prev, modelName]);
191
- // Sauvegarder dans le localStorage
192
- if (user) {
193
- const localVotes = JSON.parse(
194
- localStorage.getItem(`votes_${user.username}`) || "[]"
195
- );
196
- if (!localVotes.includes(modelName)) {
197
- localVotes.push(modelName);
198
- localStorage.setItem(
199
- `votes_${user.username}`,
200
- JSON.stringify(localVotes)
201
- );
202
- }
203
- }
204
- return newSet;
205
- });
206
 
207
- // Split modelName into provider and model
208
- const [provider, model] = modelName.split("/");
209
 
210
  const response = await fetch(
211
- `/api/votes/${modelName}?vote_type=up&user_id=${user.username}`,
212
  {
213
  method: "POST",
214
  headers: {
215
  "Content-Type": "application/json",
216
  },
 
 
 
 
217
  }
218
  );
219
 
220
  if (!response.ok) {
221
- // Si le vote échoue, on retire le vote du localStorage et du state
222
- setUserVotes((prev) => {
223
- const newSet = new Set(prev);
224
- newSet.delete(modelName);
225
- if (user) {
226
- const localVotes = JSON.parse(
227
- localStorage.getItem(`votes_${user.username}`) || "[]"
228
- );
229
- const updatedVotes = localVotes.filter(
230
- (vote) => vote !== modelName
231
- );
232
- localStorage.setItem(
233
- `votes_${user.username}`,
234
- JSON.stringify(updatedVotes)
235
- );
236
- }
237
- return newSet;
238
- });
239
  throw new Error("Failed to submit vote");
240
  }
241
 
242
- // Refresh votes for this model
 
 
243
  const votesResponse = await fetch(
244
- `/api/votes/model/${provider}/${model}`
245
  );
246
- const votesData = await votesResponse.json();
247
 
248
- // Calculate total vote score from votes_by_revision
249
- const totalScore = Object.values(
250
- votesData.votes_by_revision || {}
251
- ).reduce((a, b) => a + b, 0);
 
 
252
 
253
  // Update model and resort the list
254
  setPendingModels((models) => {
255
- const updatedModels = models.map((model) =>
256
- model.name === modelName
257
  ? {
258
- ...model,
259
- votes: totalScore,
260
- votes_by_revision: votesData.votes_by_revision,
 
261
  }
262
- : model
263
  );
264
- return updatedModels.sort((a, b) => b.votes - a.votes);
 
 
265
  });
 
 
 
266
  } catch (err) {
 
267
  setError(err.message);
 
 
 
 
 
 
268
  }
269
  };
270
 
@@ -284,7 +326,15 @@ function VoteModelPage() {
284
  }
285
 
286
  return (
287
- <Box sx={{ width: "100%", maxWidth: 1200, margin: "0 auto", padding: 4 }}>
 
 
 
 
 
 
 
 
288
  <PageHeader
289
  title="Vote for the Next Models"
290
  subtitle={
@@ -441,7 +491,7 @@ function VoteModelPage() {
441
  {pendingModels.map((model, index) => {
442
  const isTopThree = index < 3;
443
  return (
444
- <React.Fragment key={model.name}>
445
  {index > 0 && <Divider />}
446
  <ListItem
447
  sx={{
@@ -462,37 +512,74 @@ function VoteModelPage() {
462
  <Stack spacing={1}>
463
  {/* Model name and link */}
464
  <Stack direction="row" spacing={1} alignItems="center">
465
- <Link
466
- href={`https://huggingface.co/${model.name}`}
467
- target="_blank"
468
- rel="noopener noreferrer"
469
- sx={{
470
- textDecoration: "none",
471
- color: "primary.main",
472
- fontWeight: 500,
473
- "&:hover": {
474
- textDecoration: "underline",
475
- },
476
- }}
477
  >
478
- {model.name}
479
- </Link>
480
- <IconButton
481
- size="small"
482
- href={`https://huggingface.co/${model.name}`}
483
- target="_blank"
484
- rel="noopener noreferrer"
485
- sx={{
486
- ml: 0.5,
487
- p: 0.5,
488
- color: "action.active",
489
- "&:hover": {
490
  color: "primary.main",
491
- },
492
- }}
493
- >
494
- <OpenInNewIcon sx={{ fontSize: "1rem" }} />
495
- </IconButton>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
496
  </Stack>
497
  {/* Metadata row */}
498
  <Stack direction="row" spacing={2} alignItems="center">
@@ -600,8 +687,12 @@ function VoteModelPage() {
600
  <Button
601
  variant={model.hasVoted ? "contained" : "outlined"}
602
  size="large"
603
- onClick={() => handleVote(model.name)}
604
- disabled={!isAuthenticated || model.hasVoted}
 
 
 
 
605
  color="primary"
606
  sx={{
607
  minWidth: "100px",
@@ -629,7 +720,9 @@ function VoteModelPage() {
629
  }),
630
  }}
631
  >
632
- {model.hasVoted ? (
 
 
633
  <Stack
634
  direction="row"
635
  spacing={0.5}
 
74
  const [loadingModels, setLoadingModels] = useState(true);
75
  const [error, setError] = useState(null);
76
  const [userVotes, setUserVotes] = useState(new Set());
77
+ const [loadingVotes, setLoadingVotes] = useState({});
78
+
79
+ // Create a unique identifier for a model
80
+ const getModelUniqueId = (model) => {
81
+ return `${model.name}_${model.precision}_${model.revision}`;
82
+ };
83
 
84
  const formatWaitTime = (submissionTime) => {
85
  if (!submissionTime) return "N/A";
 
104
  return `${diffInWeeks}w`;
105
  };
106
 
107
+ const getConfigVotes = (votesData, model) => {
108
+ // Afficher les données pour le debug
109
+ console.log("Checking votes for model:", {
110
+ model_name: model.name,
111
+ precision: model.precision,
112
+ revision: model.revision,
113
+ votes_data: votesData,
114
+ });
115
+
116
+ // Parcourir toutes les configurations pour trouver celle qui correspond
117
+ for (const [key, config] of Object.entries(votesData.votes_by_config)) {
118
+ if (
119
+ config.precision === model.precision &&
120
+ config.revision === model.revision
121
+ ) {
122
+ return config.count;
123
+ }
124
+ }
125
+ return 0;
126
+ };
127
 
128
+ const sortModels = (models) => {
129
+ // Trier d'abord par nombre de votes décroissant, puis par soumission de l'utilisateur
130
+ return [...models].sort((a, b) => {
131
+ // Comparer d'abord le nombre de votes
132
+ if (b.votes !== a.votes) {
133
+ return b.votes - a.votes;
134
+ }
135
 
136
+ // Si l'utilisateur est connecté, mettre ses modèles en priorité
137
+ if (user) {
138
+ const aIsUserModel = a.submitter === user.username;
139
+ const bIsUserModel = b.submitter === user.username;
 
 
140
 
141
+ if (aIsUserModel && !bIsUserModel) return -1;
142
+ if (!aIsUserModel && bIsUserModel) return 1;
 
 
 
 
 
 
143
  }
 
144
 
145
+ // Si égalité, trier par date de soumission (le plus récent d'abord)
146
+ return new Date(b.submission_time) - new Date(a.submission_time);
147
+ });
148
+ };
149
 
150
+ // Fetch user's votes and models together
151
  useEffect(() => {
152
+ const fetchData = async () => {
153
  try {
154
+ setLoadingModels(true);
155
+ setError(null);
156
+
157
+ // Fetch user votes only if authenticated
158
+ let votedModels = new Set();
159
+ if (isAuthenticated && user) {
160
+ const userVotesResponse = await fetch(
161
+ `/api/votes/user/${user.username}`
162
+ );
163
+ if (!userVotesResponse.ok) {
164
+ throw new Error("Failed to fetch user votes");
165
+ }
166
+ const votesData = await userVotesResponse.json();
167
+ const userVotes = Array.isArray(votesData) ? votesData : [];
168
+
169
+ userVotes.forEach((vote) => {
170
+ const uniqueId = `${vote.model}_${vote.precision || "unknown"}_${
171
+ vote.revision || "main"
172
+ }`;
173
+ votedModels.add(uniqueId);
174
+ });
175
+ }
176
+ setUserVotes(votedModels);
177
+
178
+ // Fetch pending models
179
+ const pendingModelsResponse = await fetch("/api/models/pending");
180
+ if (!pendingModelsResponse.ok) {
181
  throw new Error("Failed to fetch pending models");
182
  }
183
+ const modelsData = await pendingModelsResponse.json();
184
 
185
  // Fetch votes for each model
186
  const modelsWithVotes = await Promise.all(
187
+ modelsData.map(async (model) => {
188
+ try {
189
+ const [provider, modelName] = model.name.split("/");
190
+ const votesResponse = await fetch(
191
+ `/api/votes/model/${provider}/${modelName}`
192
+ );
193
+
194
+ if (!votesResponse.ok) {
195
+ return {
196
+ ...model,
197
+ votes: 0,
198
+ votes_by_config: {},
199
+ wait_time: formatWaitTime(model.submission_time),
200
+ hasVoted: votedModels.has(getModelUniqueId(model)),
201
+ };
202
+ }
203
+
204
+ const votesData = await votesResponse.json();
205
+ return {
206
+ ...model,
207
+ votes: getConfigVotes(votesData, model),
208
+ votes_by_config: votesData.votes_by_config || {},
209
+ wait_time: formatWaitTime(model.submission_time),
210
+ hasVoted: votedModels.has(getModelUniqueId(model)),
211
+ };
212
+ } catch (err) {
213
+ console.error(`Error fetching votes for ${model.name}:`, err);
214
+ return {
215
+ ...model,
216
+ votes: 0,
217
+ votes_by_config: {},
218
+ wait_time: formatWaitTime(model.submission_time),
219
+ hasVoted: votedModels.has(getModelUniqueId(model)),
220
+ };
221
+ }
222
  })
223
  );
224
 
225
+ // Sort models
226
+ const sortedModels = sortModels(modelsWithVotes);
 
227
  setPendingModels(sortedModels);
228
  } catch (err) {
229
+ console.error("Error fetching data:", err);
230
  setError(err.message);
231
  } finally {
232
  setLoadingModels(false);
233
  }
234
  };
235
 
236
+ fetchData();
237
+ }, [isAuthenticated, user]);
238
 
239
+ const handleVote = async (model) => {
240
  if (!isAuthenticated) return;
241
 
242
  try {
243
+ setError(null);
244
+ // Set loading state for this specific model
245
+ setLoadingVotes((prev) => ({ ...prev, [getModelUniqueId(model)]: true }));
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
246
 
247
+ // Encode model name for URL
248
+ const encodedModelName = encodeURIComponent(model.name);
249
 
250
  const response = await fetch(
251
+ `/api/votes/${encodedModelName}?vote_type=up&user_id=${user.username}`,
252
  {
253
  method: "POST",
254
  headers: {
255
  "Content-Type": "application/json",
256
  },
257
+ body: JSON.stringify({
258
+ precision: model.precision,
259
+ revision: model.revision,
260
+ }),
261
  }
262
  );
263
 
264
  if (!response.ok) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
265
  throw new Error("Failed to submit vote");
266
  }
267
 
268
+ // Refresh votes for this model with cache bypass
269
+ const [provider, modelName] = model.name.split("/");
270
+ const timestamp = Date.now();
271
  const votesResponse = await fetch(
272
+ `/api/votes/model/${provider}/${modelName}?nocache=${timestamp}`
273
  );
 
274
 
275
+ if (!votesResponse.ok) {
276
+ throw new Error("Failed to fetch updated votes");
277
+ }
278
+
279
+ const votesData = await votesResponse.json();
280
+ console.log(`Updated votes for ${model.name}:`, votesData); // Debug log
281
 
282
  // Update model and resort the list
283
  setPendingModels((models) => {
284
+ const updatedModels = models.map((m) =>
285
+ getModelUniqueId(m) === getModelUniqueId(model)
286
  ? {
287
+ ...m,
288
+ votes: getConfigVotes(votesData, m),
289
+ votes_by_config: votesData.votes_by_config || {},
290
+ hasVoted: true,
291
  }
292
+ : m
293
  );
294
+ const sortedModels = sortModels(updatedModels);
295
+ console.log("Updated and sorted models:", sortedModels); // Debug log
296
+ return sortedModels;
297
  });
298
+
299
+ // Update user votes with unique ID
300
+ setUserVotes((prev) => new Set([...prev, getModelUniqueId(model)]));
301
  } catch (err) {
302
+ console.error("Error voting:", err);
303
  setError(err.message);
304
+ } finally {
305
+ // Clear loading state for this model
306
+ setLoadingVotes((prev) => ({
307
+ ...prev,
308
+ [getModelUniqueId(model)]: false,
309
+ }));
310
  }
311
  };
312
 
 
326
  }
327
 
328
  return (
329
+ <Box
330
+ sx={{
331
+ width: "100%",
332
+ maxWidth: 1200,
333
+ margin: "0 auto",
334
+ py: 4,
335
+ px: 0,
336
+ }}
337
+ >
338
  <PageHeader
339
  title="Vote for the Next Models"
340
  subtitle={
 
491
  {pendingModels.map((model, index) => {
492
  const isTopThree = index < 3;
493
  return (
494
+ <React.Fragment key={getModelUniqueId(model)}>
495
  {index > 0 && <Divider />}
496
  <ListItem
497
  sx={{
 
512
  <Stack spacing={1}>
513
  {/* Model name and link */}
514
  <Stack direction="row" spacing={1} alignItems="center">
515
+ <Stack
516
+ direction="row"
517
+ spacing={1}
518
+ alignItems="center"
519
+ sx={{ flexGrow: 1 }}
 
 
 
 
 
 
 
520
  >
521
+ <Link
522
+ href={`https://huggingface.co/${model.name}`}
523
+ target="_blank"
524
+ rel="noopener noreferrer"
525
+ sx={{
526
+ textDecoration: "none",
 
 
 
 
 
 
527
  color: "primary.main",
528
+ fontWeight: 500,
529
+ "&:hover": {
530
+ textDecoration: "underline",
531
+ },
532
+ }}
533
+ >
534
+ {model.name}
535
+ </Link>
536
+ <IconButton
537
+ size="small"
538
+ href={`https://huggingface.co/${model.name}`}
539
+ target="_blank"
540
+ rel="noopener noreferrer"
541
+ sx={{
542
+ ml: 0.5,
543
+ p: 0.5,
544
+ color: "action.active",
545
+ "&:hover": {
546
+ color: "primary.main",
547
+ },
548
+ }}
549
+ >
550
+ <OpenInNewIcon sx={{ fontSize: "1rem" }} />
551
+ </IconButton>
552
+ </Stack>
553
+ <Stack direction="row" spacing={1}>
554
+ <Chip
555
+ label={model.precision}
556
+ size="small"
557
+ variant="outlined"
558
+ sx={{
559
+ borderColor: "grey.300",
560
+ bgcolor: "grey.50",
561
+ "& .MuiChip-label": {
562
+ fontSize: "0.75rem",
563
+ fontWeight: 600,
564
+ color: "text.secondary",
565
+ },
566
+ }}
567
+ />
568
+ <Chip
569
+ label={`rev: ${model.revision.slice(0, 7)}`}
570
+ size="small"
571
+ variant="outlined"
572
+ sx={{
573
+ borderColor: "grey.300",
574
+ bgcolor: "grey.50",
575
+ "& .MuiChip-label": {
576
+ fontSize: "0.75rem",
577
+ fontWeight: 600,
578
+ color: "text.secondary",
579
+ },
580
+ }}
581
+ />
582
+ </Stack>
583
  </Stack>
584
  {/* Metadata row */}
585
  <Stack direction="row" spacing={2} alignItems="center">
 
687
  <Button
688
  variant={model.hasVoted ? "contained" : "outlined"}
689
  size="large"
690
+ onClick={() => handleVote(model)}
691
+ disabled={
692
+ !isAuthenticated ||
693
+ model.hasVoted ||
694
+ loadingVotes[getModelUniqueId(model)]
695
+ }
696
  color="primary"
697
  sx={{
698
  minWidth: "100px",
 
720
  }),
721
  }}
722
  >
723
+ {loadingVotes[getModelUniqueId(model)] ? (
724
+ <CircularProgress size={24} color="inherit" />
725
+ ) : model.hasVoted ? (
726
  <Stack
727
  direction="row"
728
  spacing={0.5}